// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: server/yarn_server_resourcemanager_service_protos.proto
package org.apache.hadoop.yarn.proto;
public final class YarnServerResourceManagerServiceProtos {
private YarnServerResourceManagerServiceProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
/**
* Protobuf enum {@code hadoop.yarn.DecommissionTypeProto}
*/
public enum DecommissionTypeProto
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
* NORMAL = 1;
*/
NORMAL(1),
/**
* GRACEFUL = 2;
*/
GRACEFUL(2),
/**
* FORCEFUL = 3;
*/
FORCEFUL(3),
;
/**
* NORMAL = 1;
*/
public static final int NORMAL_VALUE = 1;
/**
* GRACEFUL = 2;
*/
public static final int GRACEFUL_VALUE = 2;
/**
* FORCEFUL = 3;
*/
public static final int FORCEFUL_VALUE = 3;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static DecommissionTypeProto valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static DecommissionTypeProto forNumber(int value) {
switch (value) {
case 1: return NORMAL;
case 2: return GRACEFUL;
case 3: return FORCEFUL;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<DecommissionTypeProto>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
DecommissionTypeProto> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<DecommissionTypeProto>() {
public DecommissionTypeProto findValueByNumber(int number) {
return DecommissionTypeProto.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.getDescriptor().getEnumTypes().get(0);
}
private static final DecommissionTypeProto[] VALUES = values();
public static DecommissionTypeProto valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private DecommissionTypeProto(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.yarn.DecommissionTypeProto)
}
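// Usage sketch (added for illustration; not generated code). It shows the
// two wire-value lookups on the enum above: forNumber(int) returns null for
// an unrecognized value, while the deprecated valueOf(int) delegates to it.
//
//   DecommissionTypeProto type = DecommissionTypeProto.forNumber(2);  // GRACEFUL
//   if (type == null) {
//     type = DecommissionTypeProto.NORMAL;  // fall back on unknown wire values
//   }
//   int wire = type.getNumber();  // 2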
/**
* Protobuf enum {@code hadoop.yarn.AttributeMappingOperationTypeProto}
*/
public enum AttributeMappingOperationTypeProto
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
* REPLACE = 1;
*/
REPLACE(1),
/**
* ADD = 2;
*/
ADD(2),
/**
* REMOVE = 3;
*/
REMOVE(3),
;
/**
* REPLACE = 1;
*/
public static final int REPLACE_VALUE = 1;
/**
* ADD = 2;
*/
public static final int ADD_VALUE = 2;
/**
* REMOVE = 3;
*/
public static final int REMOVE_VALUE = 3;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static AttributeMappingOperationTypeProto valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static AttributeMappingOperationTypeProto forNumber(int value) {
switch (value) {
case 1: return REPLACE;
case 2: return ADD;
case 3: return REMOVE;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AttributeMappingOperationTypeProto>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
AttributeMappingOperationTypeProto> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AttributeMappingOperationTypeProto>() {
public AttributeMappingOperationTypeProto findValueByNumber(int number) {
return AttributeMappingOperationTypeProto.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.getDescriptor().getEnumTypes().get(1);
}
private static final AttributeMappingOperationTypeProto[] VALUES = values();
public static AttributeMappingOperationTypeProto valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private AttributeMappingOperationTypeProto(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.yarn.AttributeMappingOperationTypeProto)
}
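// Usage sketch (added for illustration; not generated code). The
// descriptor-based valueOf is stricter than forNumber: it throws
// IllegalArgumentException for a descriptor of a different enum type,
// whereas forNumber simply returns null for unknown numbers.
//
//   org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor d =
//       AttributeMappingOperationTypeProto.ADD.getValueDescriptor();
//   AttributeMappingOperationTypeProto op =
//       AttributeMappingOperationTypeProto.valueOf(d);  // ADD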
public interface RefreshQueuesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshQueuesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RefreshQueuesRequestProto}
*/
public static final class RefreshQueuesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshQueuesRequestProto)
RefreshQueuesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshQueuesRequestProto.newBuilder() to construct.
private RefreshQueuesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshQueuesRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshQueuesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshQueuesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshQueuesRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshQueuesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshQueuesRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshQueuesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshQueuesRequestProto>() {
@java.lang.Override
public RefreshQueuesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshQueuesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshQueuesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
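// Usage sketch (added for illustration; not generated code). A typical
// build/serialize/parse round trip for the message above; the sub-cluster id
// "SC-1" is a made-up example value.
//
//   RefreshQueuesRequestProto req = RefreshQueuesRequestProto.newBuilder()
//       .setSubClusterId("SC-1")
//       .build();
//   byte[] bytes = req.toByteArray();
//   RefreshQueuesRequestProto parsed = RefreshQueuesRequestProto.parseFrom(bytes);
//   assert parsed.hasSubClusterId() && "SC-1".equals(parsed.getSubClusterId());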
public interface RefreshQueuesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshQueuesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshQueuesResponseProto}
*/
public static final class RefreshQueuesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshQueuesResponseProto)
RefreshQueuesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshQueuesResponseProto.newBuilder() to construct.
private RefreshQueuesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshQueuesResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshQueuesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshQueuesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshQueuesResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshQueuesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshQueuesResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshQueuesResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshQueuesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshQueuesResponseProto>() {
@java.lang.Override
public RefreshQueuesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshQueuesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshQueuesResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
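// Usage sketch (added for illustration; not generated code).
// RefreshQueuesResponseProto declares no fields, so callers normally reuse the
// default instance; with no fields and no unknown fields set, it serializes to
// an empty byte array.
//
//   RefreshQueuesResponseProto resp = RefreshQueuesResponseProto.getDefaultInstance();
//   byte[] bytes = resp.toByteArray();  // zero-length
//   RefreshQueuesResponseProto parsed = RefreshQueuesResponseProto.parseFrom(bytes);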
public interface RefreshNodesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshNodesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @return Whether the decommissionType field is set.
*/
boolean hasDecommissionType();
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @return The decommissionType.
*/
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto getDecommissionType();
/**
* optional int32 decommissionTimeout = 2;
* @return Whether the decommissionTimeout field is set.
*/
boolean hasDecommissionTimeout();
/**
* optional int32 decommissionTimeout = 2;
* @return The decommissionTimeout.
*/
int getDecommissionTimeout();
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesRequestProto}
*/
public static final class RefreshNodesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshNodesRequestProto)
RefreshNodesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshNodesRequestProto.newBuilder() to construct.
private RefreshNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshNodesRequestProto() {
decommissionType_ = 1;
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshNodesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto.Builder.class);
}
private int bitField0_;
public static final int DECOMMISSIONTYPE_FIELD_NUMBER = 1;
private int decommissionType_ = 1;
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @return Whether the decommissionType field is set.
*/
@java.lang.Override public boolean hasDecommissionType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @return The decommissionType.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto getDecommissionType() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto result = org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto.forNumber(decommissionType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto.NORMAL : result;
}
public static final int DECOMMISSIONTIMEOUT_FIELD_NUMBER = 2;
private int decommissionTimeout_ = 0;
/**
* optional int32 decommissionTimeout = 2;
* @return Whether the decommissionTimeout field is set.
*/
@java.lang.Override
public boolean hasDecommissionTimeout() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int32 decommissionTimeout = 2;
* @return The decommissionTimeout.
*/
@java.lang.Override
public int getDecommissionTimeout() {
return decommissionTimeout_;
}
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, decommissionType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt32(2, decommissionTimeout_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(1, decommissionType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(2, decommissionTimeout_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto) obj;
if (hasDecommissionType() != other.hasDecommissionType()) return false;
if (hasDecommissionType()) {
if (decommissionType_ != other.decommissionType_) return false;
}
if (hasDecommissionTimeout() != other.hasDecommissionTimeout()) return false;
if (hasDecommissionTimeout()) {
if (getDecommissionTimeout()
!= other.getDecommissionTimeout()) return false;
}
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasDecommissionType()) {
hash = (37 * hash) + DECOMMISSIONTYPE_FIELD_NUMBER;
hash = (53 * hash) + decommissionType_;
}
if (hasDecommissionTimeout()) {
hash = (37 * hash) + DECOMMISSIONTIMEOUT_FIELD_NUMBER;
hash = (53 * hash) + getDecommissionTimeout();
}
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
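// All of the parseFrom/parseDelimitedFrom overloads above delegate either to
// the singleton PARSER or to the GeneratedMessageV3 IO helpers (which unwrap
// stream failures into IOException); they share no mutable state and are
// safe to call concurrently.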
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshNodesRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
decommissionType_ = 1;
decommissionTimeout_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.decommissionType_ = decommissionType_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.decommissionTimeout_ = decommissionTimeout_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
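// buildPartial0 above copies a builder field into the message only when its
// presence bit is set in the builder's bitField0_, then ORs the accumulated
// mask into the message's bitField0_, so hasXxx() on the built message
// mirrors exactly what was set on the builder.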
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto.getDefaultInstance()) return this;
if (other.hasDecommissionType()) {
setDecommissionType(other.getDecommissionType());
}
if (other.hasDecommissionTimeout()) {
setDecommissionTimeout(other.getDecommissionTimeout());
}
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto tmpValue =
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(1, tmpRaw);
} else {
decommissionType_ = tmpRaw;
bitField0_ |= 0x00000001;
}
break;
} // case 8
case 16: {
decommissionTimeout_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
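// Wire-format note for the tag switch above: protobuf encodes each tag as
// (field_number << 3) | wire_type. decommissionType is field 1 as a varint
// (wire type 0), giving tag (1 << 3) | 0 = 8; decommissionTimeout is field 2
// as a varint, tag 16; sub_cluster_id is field 3 as a length-delimited
// string (wire type 2), tag (3 << 3) | 2 = 26. A tag of 0 means end of input.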
private int bitField0_;
private int decommissionType_ = 1;
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @return Whether the decommissionType field is set.
*/
@java.lang.Override public boolean hasDecommissionType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @return The decommissionType.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto getDecommissionType() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto result = org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto.forNumber(decommissionType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto.NORMAL : result;
}
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @param value The decommissionType to set.
* @return This builder for chaining.
*/
public Builder setDecommissionType(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DecommissionTypeProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
decommissionType_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.yarn.DecommissionTypeProto decommissionType = 1 [default = NORMAL];
* @return This builder for chaining.
*/
public Builder clearDecommissionType() {
bitField0_ = (bitField0_ & ~0x00000001);
decommissionType_ = 1;
onChanged();
return this;
}
private int decommissionTimeout_ ;
/**
* optional int32 decommissionTimeout = 2;
* @return Whether the decommissionTimeout field is set.
*/
@java.lang.Override
public boolean hasDecommissionTimeout() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int32 decommissionTimeout = 2;
* @return The decommissionTimeout.
*/
@java.lang.Override
public int getDecommissionTimeout() {
return decommissionTimeout_;
}
/**
* optional int32 decommissionTimeout = 2;
* @param value The decommissionTimeout to set.
* @return This builder for chaining.
*/
public Builder setDecommissionTimeout(int value) {
decommissionTimeout_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional int32 decommissionTimeout = 2;
* @return This builder for chaining.
*/
public Builder clearDecommissionTimeout() {
bitField0_ = (bitField0_ & ~0x00000002);
decommissionTimeout_ = 0;
onChanged();
return this;
}
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 3;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 3;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 3;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshNodesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshNodesRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshNodesRequestProto>() {
@java.lang.Override
public RefreshNodesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
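/*
 * Usage sketch (illustrative only, not part of the generated source): a
 * typical round trip through RefreshNodesRequestProto using the builder and
 * the byte[] parseFrom overload; any of the other overloads behaves the same.
 *
 *   RefreshNodesRequestProto request = RefreshNodesRequestProto.newBuilder()
 *       .setDecommissionType(DecommissionTypeProto.GRACEFUL)
 *       .setDecommissionTimeout(300)   // graceful-decommission timeout value
 *       .build();
 *   byte[] wire = request.toByteArray();
 *   RefreshNodesRequestProto parsed = RefreshNodesRequestProto.parseFrom(wire);
 *   assert parsed.hasDecommissionTimeout() && parsed.getDecommissionTimeout() == 300;
 */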
public interface RefreshNodesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshNodesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesResponseProto}
*/
public static final class RefreshNodesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshNodesResponseProto)
RefreshNodesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshNodesResponseProto.newBuilder() to construct.
private RefreshNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshNodesResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshNodesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshNodesResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshNodesResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshNodesResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshNodesResponseProto>() {
@java.lang.Override
public RefreshNodesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
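/*
 * Sketch (illustrative only): RefreshNodesResponseProto declares no fields,
 * so a default instance serializes to zero bytes; the delimited variants
 * still prepend a varint length, which is what allows several messages to
 * share one stream:
 *
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   RefreshNodesResponseProto.getDefaultInstance().writeDelimitedTo(out);
 *   java.io.ByteArrayInputStream in =
 *       new java.io.ByteArrayInputStream(out.toByteArray());
 *   RefreshNodesResponseProto echoed = RefreshNodesResponseProto.parseDelimitedFrom(in);
 */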
public interface RefreshSuperUserGroupsConfigurationRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshSuperUserGroupsConfigurationRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RefreshSuperUserGroupsConfigurationRequestProto}
*/
public static final class RefreshSuperUserGroupsConfigurationRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshSuperUserGroupsConfigurationRequestProto)
RefreshSuperUserGroupsConfigurationRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshSuperUserGroupsConfigurationRequestProto.newBuilder() to construct.
private RefreshSuperUserGroupsConfigurationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshSuperUserGroupsConfigurationRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshSuperUserGroupsConfigurationRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshSuperUserGroupsConfigurationRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshSuperUserGroupsConfigurationRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshSuperUserGroupsConfigurationRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshSuperUserGroupsConfigurationRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshSuperUserGroupsConfigurationRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshSuperUserGroupsConfigurationRequestProto>() {
@java.lang.Override
public RefreshSuperUserGroupsConfigurationRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshSuperUserGroupsConfigurationRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshSuperUserGroupsConfigurationRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
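// The optional sub_cluster_id field carried by the request message above (and
// by several other RM admin requests in this file) is, to the best of our
// reading, used in federated YARN deployments to name the sub-cluster whose
// ResourceManager an admin operation targets; single-cluster callers leave it
// unset.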
public interface RefreshSuperUserGroupsConfigurationResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshSuperUserGroupsConfigurationResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshSuperUserGroupsConfigurationResponseProto}
*/
public static final class RefreshSuperUserGroupsConfigurationResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshSuperUserGroupsConfigurationResponseProto)
RefreshSuperUserGroupsConfigurationResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshSuperUserGroupsConfigurationResponseProto.newBuilder() to construct.
private RefreshSuperUserGroupsConfigurationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshSuperUserGroupsConfigurationResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshSuperUserGroupsConfigurationResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshSuperUserGroupsConfigurationResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshSuperUserGroupsConfigurationResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshSuperUserGroupsConfigurationResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshSuperUserGroupsConfigurationResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshSuperUserGroupsConfigurationResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshSuperUserGroupsConfigurationResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshSuperUserGroupsConfigurationResponseProto>() {
@java.lang.Override
public RefreshSuperUserGroupsConfigurationResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshSuperUserGroupsConfigurationResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshSuperUserGroupsConfigurationResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
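// Editorial note (not generated code): the response message is likewise empty,
// so callers normally only need the singleton default instance. A minimal
// sketch, assuming the RPC layer hands back raw bytes:
//
//   RefreshSuperUserGroupsConfigurationResponseProto response =
//       RefreshSuperUserGroupsConfigurationResponseProto.getDefaultInstance();
//   // Parsing an empty byte array yields a message equal to the default:
//   RefreshSuperUserGroupsConfigurationResponseProto parsed =
//       RefreshSuperUserGroupsConfigurationResponseProto.parseFrom(new byte[0]);
//   boolean same = parsed.equals(response); // true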
public interface RefreshUserToGroupsMappingsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshUserToGroupsMappingsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RefreshUserToGroupsMappingsRequestProto}
*/
public static final class RefreshUserToGroupsMappingsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshUserToGroupsMappingsRequestProto)
RefreshUserToGroupsMappingsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshUserToGroupsMappingsRequestProto.newBuilder() to construct.
private RefreshUserToGroupsMappingsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshUserToGroupsMappingsRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshUserToGroupsMappingsRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
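// Editorial note on the accessor pair above (not generated code):
// subClusterId_ holds either a java.lang.String or a ByteString. The first
// call to getSubClusterId() decodes the ByteString as UTF-8 and caches the
// String only when the bytes are valid UTF-8; getSubClusterIdBytes() performs
// the inverse conversion and caches the ByteString. Repeated reads are
// therefore cheap whether the message was built from text or parsed from the
// wire.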
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshUserToGroupsMappingsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshUserToGroupsMappingsRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
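// Editorial note on the loop above (not generated code): each wire tag packs
// (field_number << 3) | wire_type. case 10 is field 1 (sub_cluster_id) with
// wire type 2 (length-delimited), i.e. (1 << 3) | 2 = 10; tag 0 means the
// input is exhausted, and any other tag is routed to parseUnknownField().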
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshUserToGroupsMappingsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshUserToGroupsMappingsRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshUserToGroupsMappingsRequestProto>() {
@java.lang.Override
public RefreshUserToGroupsMappingsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
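// Editorial usage sketch (not generated code): exercising the optional
// sub_cluster_id field declared by the message above. The id value below is
// illustrative only.
//
//   RefreshUserToGroupsMappingsRequestProto request =
//       RefreshUserToGroupsMappingsRequestProto.newBuilder()
//           .setSubClusterId("SC-1") // hypothetical subcluster id
//           .build();
//   assert request.hasSubClusterId();
//   RefreshUserToGroupsMappingsRequestProto parsed =
//       RefreshUserToGroupsMappingsRequestProto.parseFrom(request.toByteArray());
//   String id = parsed.getSubClusterId(); // "SC-1"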
public interface RefreshUserToGroupsMappingsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshUserToGroupsMappingsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshUserToGroupsMappingsResponseProto}
*/
public static final class RefreshUserToGroupsMappingsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshUserToGroupsMappingsResponseProto)
RefreshUserToGroupsMappingsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshUserToGroupsMappingsResponseProto.newBuilder() to construct.
private RefreshUserToGroupsMappingsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshUserToGroupsMappingsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshUserToGroupsMappingsResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshUserToGroupsMappingsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshUserToGroupsMappingsResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshUserToGroupsMappingsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshUserToGroupsMappingsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshUserToGroupsMappingsResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshUserToGroupsMappingsResponseProto>() {
@java.lang.Override
public RefreshUserToGroupsMappingsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
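// Editorial usage sketch (not generated code): length-delimited streaming with
// the parseDelimitedFrom() overloads declared above. writeDelimitedTo() comes
// from the protobuf MessageLite API; the stream objects here are assumptions
// for illustration.
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   RefreshUserToGroupsMappingsResponseProto.getDefaultInstance()
//       .writeDelimitedTo(out); // writes a varint length prefix, then the body
//   java.io.ByteArrayInputStream in =
//       new java.io.ByteArrayInputStream(out.toByteArray());
//   RefreshUserToGroupsMappingsResponseProto msg =
//       RefreshUserToGroupsMappingsResponseProto.parseDelimitedFrom(in);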
public interface RefreshAdminAclsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshAdminAclsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RefreshAdminAclsRequestProto}
*/
public static final class RefreshAdminAclsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshAdminAclsRequestProto)
RefreshAdminAclsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshAdminAclsRequestProto.newBuilder() to construct.
private RefreshAdminAclsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshAdminAclsRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshAdminAclsRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
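// Note on the two accessors above: subClusterId_ caches whichever representation
// was needed last. A value parsed off the wire starts as a ByteString; the first
// getSubClusterId() call decodes it and, when the bytes are valid UTF-8, swaps the
// cached reference to the decoded String so later reads skip the decode. The
// getSubClusterIdBytes() accessor performs the inverse caching for String values.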
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshAdminAclsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshAdminAclsRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
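// Presence tracking in the builder above: bit 0x00000001 of bitField0_ records
// that sub_cluster_id was explicitly set; buildPartial0 copies the cached value
// and that bit into the built message, which is what hasSubClusterId() reports
// on the resulting instance.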
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
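// Wire-format note for the switch above: a protobuf tag is
// (field_number << 3) | wire_type, so case 10 is field 1 (sub_cluster_id) with
// wire type 2 (length-delimited). Tag 0 marks end of input, and any other tag is
// handed to parseUnknownField, which preserves unrecognized fields for forward
// compatibility.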
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshAdminAclsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshAdminAclsRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshAdminAclsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshAdminAclsRequestProto>() {
@java.lang.Override
public RefreshAdminAclsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshAdminAclsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshAdminAclsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
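// A builder round-trip sketch for the request type above (illustrative; the
// subcluster id is a made-up placeholder value):
//
//   RefreshAdminAclsRequestProto request =
//       RefreshAdminAclsRequestProto.newBuilder()
//           .setSubClusterId("SC-1")   // optional field; also sets its presence bit
//           .build();
//   RefreshAdminAclsRequestProto parsed =
//       RefreshAdminAclsRequestProto.parseFrom(request.toByteArray());
//   assert parsed.hasSubClusterId() && "SC-1".equals(parsed.getSubClusterId());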
public interface RefreshAdminAclsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshAdminAclsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshAdminAclsResponseProto}
*/
public static final class RefreshAdminAclsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshAdminAclsResponseProto)
RefreshAdminAclsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshAdminAclsResponseProto.newBuilder() to construct.
private RefreshAdminAclsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshAdminAclsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshAdminAclsResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
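// Caching note: memoizedHashCode treats 0 as "not yet computed", so a message whose
// real hash happens to be 0 is recomputed on every call; getSerializedSize() uses
// the same trick with -1 as the sentinel in memoizedSize.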
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshAdminAclsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshAdminAclsResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshAdminAclsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshAdminAclsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshAdminAclsResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshAdminAclsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshAdminAclsResponseProto>() {
@java.lang.Override
public RefreshAdminAclsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshAdminAclsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshAdminAclsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface RefreshServiceAclsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshServiceAclsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
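// The *OrBuilder interface above is implemented both by the immutable message and
// by its Builder, so read-only callers can accept either; for example, a
// hypothetical helper `void log(RefreshServiceAclsRequestProtoOrBuilder m)` could
// be handed a half-built Builder or a finished message interchangeably.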
/**
* Protobuf type {@code hadoop.yarn.RefreshServiceAclsRequestProto}
*/
public static final class RefreshServiceAclsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshServiceAclsRequestProto)
RefreshServiceAclsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshServiceAclsRequestProto.newBuilder() to construct.
private RefreshServiceAclsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshServiceAclsRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshServiceAclsRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshServiceAclsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshServiceAclsRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshServiceAclsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshServiceAclsRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshServiceAclsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshServiceAclsRequestProto>() {
@java.lang.Override
public RefreshServiceAclsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshServiceAclsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshServiceAclsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
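// Usage sketch (illustrative, not part of the generated API surface):
// round-tripping a request through the wire format with the builder above.
// The sub-cluster id literal is a made-up example value.
//
//   RefreshServiceAclsRequestProto request =
//       RefreshServiceAclsRequestProto.newBuilder()
//           .setSubClusterId("SC-1")       // optional field; may be omitted
//           .build();
//   byte[] wire = request.toByteArray();
//   RefreshServiceAclsRequestProto parsed =
//       RefreshServiceAclsRequestProto.parseFrom(wire);
//   assert parsed.hasSubClusterId();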
public interface RefreshServiceAclsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshServiceAclsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshServiceAclsResponseProto}
*/
public static final class RefreshServiceAclsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshServiceAclsResponseProto)
RefreshServiceAclsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshServiceAclsResponseProto.newBuilder() to construct.
private RefreshServiceAclsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshServiceAclsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshServiceAclsResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshServiceAclsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshServiceAclsResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshServiceAclsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshServiceAclsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshServiceAclsResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshServiceAclsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshServiceAclsResponseProto>() {
@java.lang.Override
public RefreshServiceAclsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshServiceAclsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshServiceAclsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
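// Note on the message above: RefreshServiceAclsResponseProto declares no
// fields, so it serializes to a zero-length payload (plus any unknown
// fields) and equals()/hashCode() depend only on those unknown fields. A
// minimal round trip, for illustration:
//
//   RefreshServiceAclsResponseProto ack =
//       RefreshServiceAclsResponseProto.getDefaultInstance();
//   byte[] wire = ack.toByteArray();   // empty byte array
//   RefreshServiceAclsResponseProto back =
//       RefreshServiceAclsResponseProto.parseFrom(wire);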
public interface GetGroupsForUserRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetGroupsForUserRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string user = 1;
* @return Whether the user field is set.
*/
boolean hasUser();
/**
* required string user = 1;
* @return The user.
*/
java.lang.String getUser();
/**
* required string user = 1;
* @return The bytes for user.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes();
}
/**
* Protobuf type {@code hadoop.yarn.GetGroupsForUserRequestProto}
*/
public static final class GetGroupsForUserRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetGroupsForUserRequestProto)
GetGroupsForUserRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetGroupsForUserRequestProto.newBuilder() to construct.
private GetGroupsForUserRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetGroupsForUserRequestProto() {
user_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetGroupsForUserRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto.Builder.class);
}
private int bitField0_;
public static final int USER_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object user_ = "";
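// user_ holds either a java.lang.String or a ByteString: the wire parser
// stores the raw bytes, getUser() below lazily converts them to a String
// (caching the result when the bytes are valid UTF-8), and getUserBytes()
// converts in the other direction. The volatile keyword makes the cached
// swap safe to publish across threads.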
/**
* required string user = 1;
* @return Whether the user field is set.
*/
@java.lang.Override
public boolean hasUser() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string user = 1;
* @return The user.
*/
@java.lang.Override
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
user_ = s;
}
return s;
}
}
/**
* required string user = 1;
* @return The bytes for user.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
user_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasUser()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, user_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, user_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto) obj;
if (hasUser() != other.hasUser()) return false;
if (hasUser()) {
if (!getUser()
.equals(other.getUser())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUser()) {
hash = (37 * hash) + USER_FIELD_NUMBER;
hash = (53 * hash) + getUser().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetGroupsForUserRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetGroupsForUserRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
user_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.user_ = user_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto.getDefaultInstance()) return this;
if (other.hasUser()) {
user_ = other.user_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasUser()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
user_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object user_ = "";
/**
* required string user = 1;
* @return Whether the user field is set.
*/
public boolean hasUser() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string user = 1;
* @return The user.
*/
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
user_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string user = 1;
* @return The bytes for user.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
user_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string user = 1;
* @param value The user to set.
* @return This builder for chaining.
*/
public Builder setUser(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
user_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string user = 1;
* @return This builder for chaining.
*/
public Builder clearUser() {
user_ = getDefaultInstance().getUser();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string user = 1;
* @param value The bytes for user to set.
* @return This builder for chaining.
*/
public Builder setUserBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
user_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetGroupsForUserRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetGroupsForUserRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetGroupsForUserRequestProto>() {
@java.lang.Override
public GetGroupsForUserRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
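// Sketch of the required-field contract for the message above: user is a
// required field, so build() refuses an unset builder while buildPartial()
// does not. The user name below is a placeholder value.
//
//   GetGroupsForUserRequestProto.Builder b =
//       GetGroupsForUserRequestProto.newBuilder();
//   GetGroupsForUserRequestProto partial = b.buildPartial();
//   assert !partial.isInitialized();   // user not set
//   // b.build() here would throw an uninitialized-message exception
//   GetGroupsForUserRequestProto ok = b.setUser("alice").build();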
public interface GetGroupsForUserResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetGroupsForUserResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string groups = 1;
* @return A list containing the groups.
*/
java.util.List<java.lang.String>
getGroupsList();
/**
* repeated string groups = 1;
* @return The count of groups.
*/
int getGroupsCount();
/**
* repeated string groups = 1;
* @param index The index of the element to return.
* @return The groups at the given index.
*/
java.lang.String getGroups(int index);
/**
* repeated string groups = 1;
* @param index The index of the value to return.
* @return The bytes of the groups at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getGroupsBytes(int index);
}
/**
* Protobuf type {@code hadoop.yarn.GetGroupsForUserResponseProto}
*/
public static final class GetGroupsForUserResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetGroupsForUserResponseProto)
GetGroupsForUserResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetGroupsForUserResponseProto.newBuilder() to construct.
private GetGroupsForUserResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetGroupsForUserResponseProto() {
groups_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetGroupsForUserResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto.Builder.class);
}
public static final int GROUPS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringList groups_;
/**
* repeated string groups = 1;
* @return A list containing the groups.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getGroupsList() {
return groups_;
}
/**
* repeated string groups = 1;
* @return The count of groups.
*/
public int getGroupsCount() {
return groups_.size();
}
/**
* repeated string groups = 1;
* @param index The index of the element to return.
* @return The groups at the given index.
*/
public java.lang.String getGroups(int index) {
return groups_.get(index);
}
/**
* repeated string groups = 1;
* @param index The index of the value to return.
* @return The bytes of the groups at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getGroupsBytes(int index) {
return groups_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < groups_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, groups_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < groups_.size(); i++) {
dataSize += computeStringSizeNoTag(groups_.getRaw(i));
}
size += dataSize;
size += 1 * getGroupsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto) obj;
if (!getGroupsList()
.equals(other.getGroupsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getGroupsCount() > 0) {
hash = (37 * hash) + GROUPS_FIELD_NUMBER;
hash = (53 * hash) + getGroupsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetGroupsForUserResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetGroupsForUserResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
groups_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_GetGroupsForUserResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto result) {
if (((bitField0_ & 0x00000001) != 0)) {
groups_ = groups_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.groups_ = groups_;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto.getDefaultInstance()) return this;
if (!other.groups_.isEmpty()) {
if (groups_.isEmpty()) {
groups_ = other.groups_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureGroupsIsMutable();
groups_.addAll(other.groups_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureGroupsIsMutable();
groups_.add(bs);
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.thirdparty.protobuf.LazyStringList groups_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
private void ensureGroupsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
groups_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(groups_);
bitField0_ |= 0x00000001;
}
}
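// ensureGroupsIsMutable() is the other half of the copy-on-write pattern:
// bit 0x00000001 of bitField0_ records whether groups_ is a private mutable
// copy; while the bit is clear, groups_ may alias a list owned by a built
// message, so every mutator below calls this first.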
/**
* repeated string groups = 1;
* @return A list containing the groups.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getGroupsList() {
return groups_.getUnmodifiableView();
}
/**
* repeated string groups = 1;
* @return The count of groups.
*/
public int getGroupsCount() {
return groups_.size();
}
/**
* repeated string groups = 1;
* @param index The index of the element to return.
* @return The groups at the given index.
*/
public java.lang.String getGroups(int index) {
return groups_.get(index);
}
/**
* repeated string groups = 1;
* @param index The index of the value to return.
* @return The bytes of the groups at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getGroupsBytes(int index) {
return groups_.getByteString(index);
}
/**
* repeated string groups = 1;
* @param index The index to set the value at.
* @param value The groups to set.
* @return This builder for chaining.
*/
public Builder setGroups(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureGroupsIsMutable();
groups_.set(index, value);
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @param value The groups to add.
* @return This builder for chaining.
*/
public Builder addGroups(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureGroupsIsMutable();
groups_.add(value);
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @param values The groups to add.
* @return This builder for chaining.
*/
public Builder addAllGroups(
java.lang.Iterable<java.lang.String> values) {
ensureGroupsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, groups_);
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @return This builder for chaining.
*/
public Builder clearGroups() {
groups_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @param value The bytes of the groups to add.
* @return This builder for chaining.
*/
public Builder addGroupsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureGroupsIsMutable();
groups_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetGroupsForUserResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetGroupsForUserResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetGroupsForUserResponseProto>() {
@java.lang.Override
public GetGroupsForUserResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
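// Illustrative usage (a sketch, not part of the generated API): building a
// GetGroupsForUserResponseProto and round-tripping it through the byte[]
// serialization exposed above. The group names are placeholders.
//
//   GetGroupsForUserResponseProto resp = GetGroupsForUserResponseProto.newBuilder()
//       .addGroups("hadoop")
//       .addGroups("users")
//       .build();
//   byte[] wire = resp.toByteArray();
//   GetGroupsForUserResponseProto parsed = GetGroupsForUserResponseProto.parseFrom(wire);
//   assert parsed.getGroupsList().equals(resp.getGroupsList());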
public interface UpdateNodeResourceRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateNodeResourceRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto>
getNodeResourceMapList();
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getNodeResourceMap(int index);
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
int getNodeResourceMapCount();
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder>
getNodeResourceMapOrBuilderList();
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder getNodeResourceMapOrBuilder(
int index);
/**
* optional string sub_cluster_id = 2;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 2;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 2;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.UpdateNodeResourceRequestProto}
*/
public static final class UpdateNodeResourceRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateNodeResourceRequestProto)
UpdateNodeResourceRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateNodeResourceRequestProto.newBuilder() to construct.
private UpdateNodeResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateNodeResourceRequestProto() {
nodeResourceMap_ = java.util.Collections.emptyList();
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UpdateNodeResourceRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto.Builder.class);
}
private int bitField0_;
public static final int NODE_RESOURCE_MAP_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto> nodeResourceMap_;
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto> getNodeResourceMapList() {
return nodeResourceMap_;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder>
getNodeResourceMapOrBuilderList() {
return nodeResourceMap_;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
@java.lang.Override
public int getNodeResourceMapCount() {
return nodeResourceMap_.size();
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getNodeResourceMap(int index) {
return nodeResourceMap_.get(index);
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder getNodeResourceMapOrBuilder(
int index) {
return nodeResourceMap_.get(index);
}
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 2;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 2;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 2;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getNodeResourceMapCount(); i++) {
if (!getNodeResourceMap(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
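// Note: isInitialized() caches its result in memoizedIsInitialized
// (-1 = unknown, 0 = false, 1 = true). This message declares no required
// fields of its own, so it can only be uninitialized through a nested
// NodeResourceMapProto element.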
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < nodeResourceMap_.size(); i++) {
output.writeMessage(1, nodeResourceMap_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < nodeResourceMap_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, nodeResourceMap_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto) obj;
if (!getNodeResourceMapList()
.equals(other.getNodeResourceMapList())) return false;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getNodeResourceMapCount() > 0) {
hash = (37 * hash) + NODE_RESOURCE_MAP_FIELD_NUMBER;
hash = (53 * hash) + getNodeResourceMapList().hashCode();
}
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UpdateNodeResourceRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateNodeResourceRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (nodeResourceMapBuilder_ == null) {
nodeResourceMap_ = java.util.Collections.emptyList();
} else {
nodeResourceMap_ = null;
nodeResourceMapBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto result) {
if (nodeResourceMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
nodeResourceMap_ = java.util.Collections.unmodifiableList(nodeResourceMap_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.nodeResourceMap_ = nodeResourceMap_;
} else {
result.nodeResourceMap_ = nodeResourceMapBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
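// Note: buildPartial0() above remaps builder bits to message bits: in the
// builder, bit 0x00000001 tracks the repeated node_resource_map list and bit
// 0x00000002 tracks sub_cluster_id, while the built message keeps has-bits
// only for optional fields, so sub_cluster_id lands in message bit 0x00000001.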
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto.getDefaultInstance()) return this;
if (nodeResourceMapBuilder_ == null) {
if (!other.nodeResourceMap_.isEmpty()) {
if (nodeResourceMap_.isEmpty()) {
nodeResourceMap_ = other.nodeResourceMap_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNodeResourceMapIsMutable();
nodeResourceMap_.addAll(other.nodeResourceMap_);
}
onChanged();
}
} else {
if (!other.nodeResourceMap_.isEmpty()) {
if (nodeResourceMapBuilder_.isEmpty()) {
nodeResourceMapBuilder_.dispose();
nodeResourceMapBuilder_ = null;
nodeResourceMap_ = other.nodeResourceMap_;
bitField0_ = (bitField0_ & ~0x00000001);
nodeResourceMapBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodeResourceMapFieldBuilder() : null;
} else {
nodeResourceMapBuilder_.addAllMessages(other.nodeResourceMap_);
}
}
}
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getNodeResourceMapCount(); i++) {
if (!getNodeResourceMap(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.PARSER,
extensionRegistry);
if (nodeResourceMapBuilder_ == null) {
ensureNodeResourceMapIsMutable();
nodeResourceMap_.add(m);
} else {
nodeResourceMapBuilder_.addMessage(m);
}
break;
} // case 10
case 18: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto> nodeResourceMap_ =
java.util.Collections.emptyList();
private void ensureNodeResourceMapIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
nodeResourceMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto>(nodeResourceMap_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder> nodeResourceMapBuilder_;
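// The repeated field keeps exactly one live representation: the plain
// nodeResourceMap_ list while only whole messages are set, or
// nodeResourceMapBuilder_ once nested builders are requested through
// getNodeResourceMapFieldBuilder(), which nulls the list out.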
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto> getNodeResourceMapList() {
if (nodeResourceMapBuilder_ == null) {
return java.util.Collections.unmodifiableList(nodeResourceMap_);
} else {
return nodeResourceMapBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public int getNodeResourceMapCount() {
if (nodeResourceMapBuilder_ == null) {
return nodeResourceMap_.size();
} else {
return nodeResourceMapBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getNodeResourceMap(int index) {
if (nodeResourceMapBuilder_ == null) {
return nodeResourceMap_.get(index);
} else {
return nodeResourceMapBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder setNodeResourceMap(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto value) {
if (nodeResourceMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeResourceMapIsMutable();
nodeResourceMap_.set(index, value);
onChanged();
} else {
nodeResourceMapBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder setNodeResourceMap(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder builderForValue) {
if (nodeResourceMapBuilder_ == null) {
ensureNodeResourceMapIsMutable();
nodeResourceMap_.set(index, builderForValue.build());
onChanged();
} else {
nodeResourceMapBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder addNodeResourceMap(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto value) {
if (nodeResourceMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeResourceMapIsMutable();
nodeResourceMap_.add(value);
onChanged();
} else {
nodeResourceMapBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder addNodeResourceMap(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto value) {
if (nodeResourceMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeResourceMapIsMutable();
nodeResourceMap_.add(index, value);
onChanged();
} else {
nodeResourceMapBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder addNodeResourceMap(
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder builderForValue) {
if (nodeResourceMapBuilder_ == null) {
ensureNodeResourceMapIsMutable();
nodeResourceMap_.add(builderForValue.build());
onChanged();
} else {
nodeResourceMapBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder addNodeResourceMap(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder builderForValue) {
if (nodeResourceMapBuilder_ == null) {
ensureNodeResourceMapIsMutable();
nodeResourceMap_.add(index, builderForValue.build());
onChanged();
} else {
nodeResourceMapBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder addAllNodeResourceMap(
java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto> values) {
if (nodeResourceMapBuilder_ == null) {
ensureNodeResourceMapIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeResourceMap_);
onChanged();
} else {
nodeResourceMapBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder clearNodeResourceMap() {
if (nodeResourceMapBuilder_ == null) {
nodeResourceMap_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
nodeResourceMapBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public Builder removeNodeResourceMap(int index) {
if (nodeResourceMapBuilder_ == null) {
ensureNodeResourceMapIsMutable();
nodeResourceMap_.remove(index);
onChanged();
} else {
nodeResourceMapBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder getNodeResourceMapBuilder(
int index) {
return getNodeResourceMapFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder getNodeResourceMapOrBuilder(
int index) {
if (nodeResourceMapBuilder_ == null) {
return nodeResourceMap_.get(index); } else {
return nodeResourceMapBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder>
getNodeResourceMapOrBuilderList() {
if (nodeResourceMapBuilder_ != null) {
return nodeResourceMapBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodeResourceMap_);
}
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder addNodeResourceMapBuilder() {
return getNodeResourceMapFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder addNodeResourceMapBuilder(
int index) {
return getNodeResourceMapFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeResourceMapProto node_resource_map = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder>
getNodeResourceMapBuilderList() {
return getNodeResourceMapFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder>
getNodeResourceMapFieldBuilder() {
if (nodeResourceMapBuilder_ == null) {
nodeResourceMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder>(
nodeResourceMap_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
nodeResourceMap_ = null;
}
return nodeResourceMapBuilder_;
}
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 2;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string sub_cluster_id = 2;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 2;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 2;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 2;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 2;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateNodeResourceRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateNodeResourceRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeResourceRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateNodeResourceRequestProto>() {
@java.lang.Override
public UpdateNodeResourceRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeResourceRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeResourceRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
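// Illustrative usage (a sketch, not part of the generated API): populating an
// UpdateNodeResourceRequestProto. NodeResourceMapProto is defined in
// YarnProtos, so a default instance stands in for a real node-to-resource
// mapping here, and "subCluster-1" is a placeholder identifier.
//
//   UpdateNodeResourceRequestProto req = UpdateNodeResourceRequestProto.newBuilder()
//       .addNodeResourceMap(
//           org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.getDefaultInstance())
//       .setSubClusterId("subCluster-1")  // optional; hasSubClusterId() becomes true
//       .build();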
public interface UpdateNodeResourceResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateNodeResourceResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.UpdateNodeResourceResponseProto}
*/
public static final class UpdateNodeResourceResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateNodeResourceResponseProto)
UpdateNodeResourceResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateNodeResourceResponseProto.newBuilder() to construct.
private UpdateNodeResourceResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateNodeResourceResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UpdateNodeResourceResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UpdateNodeResourceResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateNodeResourceResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeResourceResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateNodeResourceResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateNodeResourceResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeResourceResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateNodeResourceResponseProto>() {
@java.lang.Override
public UpdateNodeResourceResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeResourceResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeResourceResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
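// Editorial usage sketch (not emitted by protoc): UpdateNodeResourceResponseProto
// carries no fields, so a wire round trip only preserves unknown fields. The
// parseFrom(byte[]) overload used below is the same one the generated messages
// in this file expose.
//
//   UpdateNodeResourceResponseProto resp =
//       UpdateNodeResourceResponseProto.newBuilder().build();
//   byte[] wire = resp.toByteArray();
//   UpdateNodeResourceResponseProto parsed =
//       UpdateNodeResourceResponseProto.parseFrom(wire);
//   assert parsed.equals(resp);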
public interface RefreshNodesResourcesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshNodesResourcesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesResourcesRequestProto}
*/
public static final class RefreshNodesResourcesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshNodesResourcesRequestProto)
RefreshNodesResourcesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshNodesResourcesRequestProto.newBuilder() to construct.
private RefreshNodesResourcesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshNodesResourcesRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshNodesResourcesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesResourcesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshNodesResourcesRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshNodesResourcesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshNodesResourcesRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResourcesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshNodesResourcesRequestProto>() {
@java.lang.Override
public RefreshNodesResourcesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResourcesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResourcesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
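// Editorial usage sketch (not emitted by protoc): setting and reading back the
// optional sub_cluster_id field defined above. "SC-1" is an arbitrary
// illustrative identifier, not a value taken from YARN.
//
//   RefreshNodesResourcesRequestProto req =
//       RefreshNodesResourcesRequestProto.newBuilder()
//           .setSubClusterId("SC-1")
//           .build();
//   RefreshNodesResourcesRequestProto parsed =
//       RefreshNodesResourcesRequestProto.parseFrom(req.toByteArray());
//   String id = parsed.hasSubClusterId() ? parsed.getSubClusterId() : "";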
public interface RefreshNodesResourcesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshNodesResourcesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesResourcesResponseProto}
*/
public static final class RefreshNodesResourcesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshNodesResourcesResponseProto)
RefreshNodesResourcesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshNodesResourcesResponseProto.newBuilder() to construct.
private RefreshNodesResourcesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshNodesResourcesResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshNodesResourcesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshNodesResourcesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshNodesResourcesResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshNodesResourcesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshNodesResourcesResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshNodesResourcesResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResourcesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshNodesResourcesResponseProto>() {
@java.lang.Override
public RefreshNodesResourcesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResourcesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshNodesResourcesResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
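// Editorial usage sketch (not emitted by protoc): the parseDelimitedFrom
// overloads above pair with MessageLite.writeDelimitedTo for streaming several
// length-prefixed messages over a single stream.
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   RefreshNodesResourcesResponseProto.getDefaultInstance().writeDelimitedTo(out);
//   java.io.ByteArrayInputStream in =
//       new java.io.ByteArrayInputStream(out.toByteArray());
//   RefreshNodesResourcesResponseProto resp =
//       RefreshNodesResourcesResponseProto.parseDelimitedFrom(in);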
public interface AddToClusterNodeLabelsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AddToClusterNodeLabelsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string deprecatedNodeLabels = 1;
* @return A list containing the deprecatedNodeLabels.
*/
java.util.List<java.lang.String>
getDeprecatedNodeLabelsList();
/**
* repeated string deprecatedNodeLabels = 1;
* @return The count of deprecatedNodeLabels.
*/
int getDeprecatedNodeLabelsCount();
/**
* repeated string deprecatedNodeLabels = 1;
* @param index The index of the element to return.
* @return The deprecatedNodeLabels at the given index.
*/
java.lang.String getDeprecatedNodeLabels(int index);
/**
* repeated string deprecatedNodeLabels = 1;
* @param index The index of the value to return.
* @return The bytes of the deprecatedNodeLabels at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getDeprecatedNodeLabelsBytes(int index);
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto>
getNodeLabelsList();
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index);
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
int getNodeLabelsCount();
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
getNodeLabelsOrBuilderList();
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
int index);
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
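// Editorial usage sketch (not emitted by protoc): populating the repeated and
// optional fields declared in the interface above. The addDeprecatedNodeLabels
// and addNodeLabels adders follow the standard protoc builder conventions for
// repeated fields; NodeLabelProto's setName/setIsExclusive setters are an
// assumption based on yarn_protos.proto, and "gpu"/"SC-1" are made-up values.
//
//   AddToClusterNodeLabelsRequestProto req =
//       AddToClusterNodeLabelsRequestProto.newBuilder()
//           .addDeprecatedNodeLabels("gpu")                    // field 1
//           .addNodeLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto
//               .newBuilder().setName("gpu").setIsExclusive(true).build())
//           .setSubClusterId("SC-1")                           // field 3
//           .build();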
/**
* Protobuf type {@code hadoop.yarn.AddToClusterNodeLabelsRequestProto}
*/
public static final class AddToClusterNodeLabelsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AddToClusterNodeLabelsRequestProto)
AddToClusterNodeLabelsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddToClusterNodeLabelsRequestProto.newBuilder() to construct.
private AddToClusterNodeLabelsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddToClusterNodeLabelsRequestProto() {
deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
nodeLabels_ = java.util.Collections.emptyList();
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddToClusterNodeLabelsRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto.Builder.class);
}
private int bitField0_;
public static final int DEPRECATEDNODELABELS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringList deprecatedNodeLabels_;
/**
* repeated string deprecatedNodeLabels = 1;
* @return A list containing the deprecatedNodeLabels.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getDeprecatedNodeLabelsList() {
return deprecatedNodeLabels_;
}
/**
* repeated string deprecatedNodeLabels = 1;
* @return The count of deprecatedNodeLabels.
*/
public int getDeprecatedNodeLabelsCount() {
return deprecatedNodeLabels_.size();
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param index The index of the element to return.
* @return The deprecatedNodeLabels at the given index.
*/
public java.lang.String getDeprecatedNodeLabels(int index) {
return deprecatedNodeLabels_.get(index);
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param index The index of the value to return.
* @return The bytes of the deprecatedNodeLabels at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDeprecatedNodeLabelsBytes(int index) {
return deprecatedNodeLabels_.getByteString(index);
}
public static final int NODELABELS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> nodeLabels_;
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> getNodeLabelsList() {
return nodeLabels_;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
getNodeLabelsOrBuilderList() {
return nodeLabels_;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
@java.lang.Override
public int getNodeLabelsCount() {
return nodeLabels_.size();
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) {
return nodeLabels_.get(index);
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
int index) {
return nodeLabels_.get(index);
}
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < deprecatedNodeLabels_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, deprecatedNodeLabels_.getRaw(i));
}
for (int i = 0; i < nodeLabels_.size(); i++) {
output.writeMessage(2, nodeLabels_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < deprecatedNodeLabels_.size(); i++) {
dataSize += computeStringSizeNoTag(deprecatedNodeLabels_.getRaw(i));
}
size += dataSize;
size += 1 * getDeprecatedNodeLabelsList().size();
}
for (int i = 0; i < nodeLabels_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, nodeLabels_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto) obj;
if (!getDeprecatedNodeLabelsList()
.equals(other.getDeprecatedNodeLabelsList())) return false;
if (!getNodeLabelsList()
.equals(other.getNodeLabelsList())) return false;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDeprecatedNodeLabelsCount() > 0) {
hash = (37 * hash) + DEPRECATEDNODELABELS_FIELD_NUMBER;
hash = (53 * hash) + getDeprecatedNodeLabelsList().hashCode();
}
if (getNodeLabelsCount() > 0) {
hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabelsList().hashCode();
}
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
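// Hand-written note (not generator output): the overloads above are the
// standard protobuf deserialization entry points; the byte-oriented variants
// throw InvalidProtocolBufferException, the stream variants throw IOException.
// A minimal round trip using only the generated API (label and id values are
// placeholders):
//
//   AddToClusterNodeLabelsRequestProto req =
//       AddToClusterNodeLabelsRequestProto.newBuilder()
//           .addDeprecatedNodeLabels("gpu")
//           .setSubClusterId("SC-1")
//           .build();
//   byte[] bytes = req.toByteArray();
//   AddToClusterNodeLabelsRequestProto parsed =
//       AddToClusterNodeLabelsRequestProto.parseFrom(bytes);
//   assert parsed.equals(req);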
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
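// Hand-written note: newBuilder() starts from the default (empty) instance,
// while toBuilder() seeds the new Builder with this message's current field
// values, so deriving a modified copy without mutating the original looks
// like (id value is a placeholder):
//
//   AddToClusterNodeLabelsRequestProto updated =
//       existing.toBuilder().setSubClusterId("SC-2").build();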
/**
* Protobuf type {@code hadoop.yarn.AddToClusterNodeLabelsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AddToClusterNodeLabelsRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
if (nodeLabelsBuilder_ == null) {
nodeLabels_ = java.util.Collections.emptyList();
} else {
nodeLabels_ = null;
nodeLabelsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto result) {
if (((bitField0_ & 0x00000001) != 0)) {
deprecatedNodeLabels_ = deprecatedNodeLabels_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.deprecatedNodeLabels_ = deprecatedNodeLabels_;
if (nodeLabelsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
nodeLabels_ = java.util.Collections.unmodifiableList(nodeLabels_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.nodeLabels_ = nodeLabels_;
} else {
result.nodeLabels_ = nodeLabelsBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
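// Hand-written note: bitField0_ packs per-field bookkeeping bits. In this
// Builder, 0x1 and 0x2 mark the repeated deprecatedNodeLabels/nodeLabels
// lists as privately mutable, and 0x4 records that sub_cluster_id was set.
// buildPartial0 remaps builder bit 0x4 to message bit 0x1 because the
// message itself only needs a has-bit for its single optional scalar field;
// repeated fields carry no presence bit.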
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto.getDefaultInstance()) return this;
if (!other.deprecatedNodeLabels_.isEmpty()) {
if (deprecatedNodeLabels_.isEmpty()) {
deprecatedNodeLabels_ = other.deprecatedNodeLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDeprecatedNodeLabelsIsMutable();
deprecatedNodeLabels_.addAll(other.deprecatedNodeLabels_);
}
onChanged();
}
if (nodeLabelsBuilder_ == null) {
if (!other.nodeLabels_.isEmpty()) {
if (nodeLabels_.isEmpty()) {
nodeLabels_ = other.nodeLabels_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureNodeLabelsIsMutable();
nodeLabels_.addAll(other.nodeLabels_);
}
onChanged();
}
} else {
if (!other.nodeLabels_.isEmpty()) {
if (nodeLabelsBuilder_.isEmpty()) {
nodeLabelsBuilder_.dispose();
nodeLabelsBuilder_ = null;
nodeLabels_ = other.nodeLabels_;
bitField0_ = (bitField0_ & ~0x00000002);
nodeLabelsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodeLabelsFieldBuilder() : null;
} else {
nodeLabelsBuilder_.addAllMessages(other.nodeLabels_);
}
}
}
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureDeprecatedNodeLabelsIsMutable();
deprecatedNodeLabels_.add(bs);
break;
} // case 10
case 18: {
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.PARSER,
extensionRegistry);
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.add(m);
} else {
nodeLabelsBuilder_.addMessage(m);
}
break;
} // case 18
case 26: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
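// Hand-written note: the case labels above are protobuf wire tags, computed
// as (field_number << 3) | wire_type. All three fields use the
// length-delimited wire type 2, hence field 1 -> 10, field 2 -> 18 and
// field 3 -> 26; a tag of 0 is never written by a conforming encoder and
// serves here as the end-of-stream marker.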
private int bitField0_;
private org.apache.hadoop.thirdparty.protobuf.LazyStringList deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
private void ensureDeprecatedNodeLabelsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
deprecatedNodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(deprecatedNodeLabels_);
bitField0_ |= 0x00000001;
}
}
/**
* repeated string deprecatedNodeLabels = 1;
* @return A list containing the deprecatedNodeLabels.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getDeprecatedNodeLabelsList() {
return deprecatedNodeLabels_.getUnmodifiableView();
}
/**
* repeated string deprecatedNodeLabels = 1;
* @return The count of deprecatedNodeLabels.
*/
public int getDeprecatedNodeLabelsCount() {
return deprecatedNodeLabels_.size();
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param index The index of the element to return.
* @return The deprecatedNodeLabels at the given index.
*/
public java.lang.String getDeprecatedNodeLabels(int index) {
return deprecatedNodeLabels_.get(index);
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param index The index of the value to return.
* @return The bytes of the deprecatedNodeLabels at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDeprecatedNodeLabelsBytes(int index) {
return deprecatedNodeLabels_.getByteString(index);
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param index The index to set the value at.
* @param value The deprecatedNodeLabels to set.
* @return This builder for chaining.
*/
public Builder setDeprecatedNodeLabels(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureDeprecatedNodeLabelsIsMutable();
deprecatedNodeLabels_.set(index, value);
onChanged();
return this;
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param value The deprecatedNodeLabels to add.
* @return This builder for chaining.
*/
public Builder addDeprecatedNodeLabels(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureDeprecatedNodeLabelsIsMutable();
deprecatedNodeLabels_.add(value);
onChanged();
return this;
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param values The deprecatedNodeLabels to add.
* @return This builder for chaining.
*/
public Builder addAllDeprecatedNodeLabels(
java.lang.Iterable<java.lang.String> values) {
ensureDeprecatedNodeLabelsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, deprecatedNodeLabels_);
onChanged();
return this;
}
/**
* repeated string deprecatedNodeLabels = 1;
* @return This builder for chaining.
*/
public Builder clearDeprecatedNodeLabels() {
deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* repeated string deprecatedNodeLabels = 1;
* @param value The bytes of the deprecatedNodeLabels to add.
* @return This builder for chaining.
*/
public Builder addDeprecatedNodeLabelsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureDeprecatedNodeLabelsIsMutable();
deprecatedNodeLabels_.add(value);
onChanged();
return this;
}
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> nodeLabels_ =
java.util.Collections.emptyList();
private void ensureNodeLabelsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
nodeLabels_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto>(nodeLabels_);
bitField0_ |= 0x00000002;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> nodeLabelsBuilder_;
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> getNodeLabelsList() {
if (nodeLabelsBuilder_ == null) {
return java.util.Collections.unmodifiableList(nodeLabels_);
} else {
return nodeLabelsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public int getNodeLabelsCount() {
if (nodeLabelsBuilder_ == null) {
return nodeLabels_.size();
} else {
return nodeLabelsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) {
if (nodeLabelsBuilder_ == null) {
return nodeLabels_.get(index);
} else {
return nodeLabelsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder setNodeLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
if (nodeLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeLabelsIsMutable();
nodeLabels_.set(index, value);
onChanged();
} else {
nodeLabelsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder setNodeLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.set(index, builderForValue.build());
onChanged();
} else {
nodeLabelsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder addNodeLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
if (nodeLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeLabelsIsMutable();
nodeLabels_.add(value);
onChanged();
} else {
nodeLabelsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder addNodeLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
if (nodeLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeLabelsIsMutable();
nodeLabels_.add(index, value);
onChanged();
} else {
nodeLabelsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder addNodeLabels(
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.add(builderForValue.build());
onChanged();
} else {
nodeLabelsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder addNodeLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.add(index, builderForValue.build());
onChanged();
} else {
nodeLabelsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder addAllNodeLabels(
java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> values) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeLabels_);
onChanged();
} else {
nodeLabelsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder clearNodeLabels() {
if (nodeLabelsBuilder_ == null) {
nodeLabels_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
nodeLabelsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public Builder removeNodeLabels(int index) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.remove(index);
onChanged();
} else {
nodeLabelsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder getNodeLabelsBuilder(
int index) {
return getNodeLabelsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
int index) {
if (nodeLabelsBuilder_ == null) {
return nodeLabels_.get(index); } else {
return nodeLabelsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
getNodeLabelsOrBuilderList() {
if (nodeLabelsBuilder_ != null) {
return nodeLabelsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodeLabels_);
}
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder() {
return getNodeLabelsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder(
int index) {
return getNodeLabelsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder>
getNodeLabelsBuilderList() {
return getNodeLabelsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
getNodeLabelsFieldBuilder() {
if (nodeLabelsBuilder_ == null) {
nodeLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>(
nodeLabels_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
nodeLabels_ = null;
}
return nodeLabelsBuilder_;
}
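// Hand-written note: nodeLabels_ stays a plain java.util.List until the first
// builder-view accessor (getNodeLabelsBuilder, addNodeLabelsBuilder, ...)
// forces creation of the RepeatedFieldBuilderV3 above, which takes ownership
// of the list (nodeLabels_ is nulled out). That is why every nodeLabels
// accessor in this Builder branches on nodeLabelsBuilder_ == null.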
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
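// Hand-written note: subClusterId_ is typed java.lang.Object so the field can
// hold either the raw ByteString read off the wire or its decoded String.
// Each accessor above converts on first use and caches the converted form
// back into the field (getSubClusterId() only caches when the bytes are
// valid UTF-8).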
/**
* optional string sub_cluster_id = 3;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 3;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 3;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AddToClusterNodeLabelsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AddToClusterNodeLabelsRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddToClusterNodeLabelsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddToClusterNodeLabelsRequestProto>() {
@java.lang.Override
public AddToClusterNodeLabelsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddToClusterNodeLabelsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddToClusterNodeLabelsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface AddToClusterNodeLabelsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AddToClusterNodeLabelsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.AddToClusterNodeLabelsResponseProto}
*/
public static final class AddToClusterNodeLabelsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AddToClusterNodeLabelsResponseProto)
AddToClusterNodeLabelsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddToClusterNodeLabelsResponseProto.newBuilder() to construct.
private AddToClusterNodeLabelsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddToClusterNodeLabelsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddToClusterNodeLabelsResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.AddToClusterNodeLabelsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AddToClusterNodeLabelsResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_AddToClusterNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AddToClusterNodeLabelsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AddToClusterNodeLabelsResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddToClusterNodeLabelsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddToClusterNodeLabelsResponseProto>() {
@java.lang.Override
public AddToClusterNodeLabelsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddToClusterNodeLabelsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddToClusterNodeLabelsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
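// Hand-written note: AddToClusterNodeLabelsResponseProto declares no fields;
// it is an empty acknowledgement message (presumably paired with the request
// above on the ResourceManager admin protocol), which is why writeTo(),
// equals() and hashCode() above operate on unknown fields alone.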
public interface RemoveFromClusterNodeLabelsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RemoveFromClusterNodeLabelsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string nodeLabels = 1;
* @return A list containing the nodeLabels.
*/
java.util.List<java.lang.String>
getNodeLabelsList();
/**
* repeated string nodeLabels = 1;
* @return The count of nodeLabels.
*/
int getNodeLabelsCount();
/**
* repeated string nodeLabels = 1;
* @param index The index of the element to return.
* @return The nodeLabels at the given index.
*/
java.lang.String getNodeLabels(int index);
/**
* repeated string nodeLabels = 1;
* @param index The index of the value to return.
* @return The bytes of the nodeLabels at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelsBytes(int index);
/**
* optional string sub_cluster_id = 2;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 2;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 2;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
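// Hand-written usage sketch (label value is a placeholder, not generator
// output): the interface above is implemented by both the message and its
// Builder, so a request is assembled as
//
//   RemoveFromClusterNodeLabelsRequestProto req =
//       RemoveFromClusterNodeLabelsRequestProto.newBuilder()
//           .addNodeLabels("gpu")
//           .build();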
/**
* Protobuf type {@code hadoop.yarn.RemoveFromClusterNodeLabelsRequestProto}
*/
public static final class RemoveFromClusterNodeLabelsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RemoveFromClusterNodeLabelsRequestProto)
RemoveFromClusterNodeLabelsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RemoveFromClusterNodeLabelsRequestProto.newBuilder() to construct.
private RemoveFromClusterNodeLabelsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RemoveFromClusterNodeLabelsRequestProto() {
nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RemoveFromClusterNodeLabelsRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto.Builder.class);
}
private int bitField0_;
public static final int NODELABELS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_;
/**
* repeated string nodeLabels = 1;
* @return A list containing the nodeLabels.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getNodeLabelsList() {
return nodeLabels_;
}
/**
* repeated string nodeLabels = 1;
* @return The count of nodeLabels.
*/
public int getNodeLabelsCount() {
return nodeLabels_.size();
}
/**
* repeated string nodeLabels = 1;
* @param index The index of the element to return.
* @return The nodeLabels at the given index.
*/
public java.lang.String getNodeLabels(int index) {
return nodeLabels_.get(index);
}
/**
* repeated string nodeLabels = 1;
* @param index The index of the value to return.
* @return The bytes of the nodeLabels at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelsBytes(int index) {
return nodeLabels_.getByteString(index);
}
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 2;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 2;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 2;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < nodeLabels_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, nodeLabels_.getRaw(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < nodeLabels_.size(); i++) {
dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i));
}
size += dataSize;
size += 1 * getNodeLabelsList().size();
}
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
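// Hand-written note: for the repeated string field, getSerializedSize() sums
// each element's length-delimited payload via computeStringSizeNoTag and then
// adds one byte per element ("1 * getNodeLabelsList().size()"), since the
// field-1 tag (value 10) always encodes in a single varint byte.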
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto) obj;
if (!getNodeLabelsList()
.equals(other.getNodeLabelsList())) return false;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getNodeLabelsCount() > 0) {
hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabelsList().hashCode();
}
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RemoveFromClusterNodeLabelsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RemoveFromClusterNodeLabelsRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto result) {
if (((bitField0_ & 0x00000001) != 0)) {
nodeLabels_ = nodeLabels_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.nodeLabels_ = nodeLabels_;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto.getDefaultInstance()) return this;
if (!other.nodeLabels_.isEmpty()) {
if (nodeLabels_.isEmpty()) {
nodeLabels_ = other.nodeLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNodeLabelsIsMutable();
nodeLabels_.addAll(other.nodeLabels_);
}
onChanged();
}
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
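// Wire tags are (field_number << 3) | wire_type. Both fields of this
// message are length-delimited (wire type 2), so nodeLabels (field 1)
// arrives as tag 10 and sub_cluster_id (field 2) as tag 18; a tag of 0
// marks end of input.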
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureNodeLabelsIsMutable();
nodeLabels_.add(bs);
break;
} // case 10
case 18: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
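// bitField0_ does double duty in this builder: bit 0x00000001 marks the
// nodeLabels_ list as privately mutable (copy-on-write), while bit
// 0x00000002 is the has-bit for the optional sub_cluster_id field.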
private org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
private void ensureNodeLabelsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_);
bitField0_ |= 0x00000001;
}
}
/**
* repeated string nodeLabels = 1;
* @return A list containing the nodeLabels.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getNodeLabelsList() {
return nodeLabels_.getUnmodifiableView();
}
/**
* repeated string nodeLabels = 1;
* @return The count of nodeLabels.
*/
public int getNodeLabelsCount() {
return nodeLabels_.size();
}
/**
* repeated string nodeLabels = 1;
* @param index The index of the element to return.
* @return The nodeLabels at the given index.
*/
public java.lang.String getNodeLabels(int index) {
return nodeLabels_.get(index);
}
/**
* repeated string nodeLabels = 1;
* @param index The index of the value to return.
* @return The bytes of the nodeLabels at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelsBytes(int index) {
return nodeLabels_.getByteString(index);
}
/**
* repeated string nodeLabels = 1;
* @param index The index to set the value at.
* @param value The nodeLabels to set.
* @return This builder for chaining.
*/
public Builder setNodeLabels(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureNodeLabelsIsMutable();
nodeLabels_.set(index, value);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 1;
* @param value The nodeLabels to add.
* @return This builder for chaining.
*/
public Builder addNodeLabels(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureNodeLabelsIsMutable();
nodeLabels_.add(value);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 1;
* @param values The nodeLabels to add.
* @return This builder for chaining.
*/
public Builder addAllNodeLabels(
java.lang.Iterable<java.lang.String> values) {
ensureNodeLabelsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeLabels_);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 1;
* @return This builder for chaining.
*/
public Builder clearNodeLabels() {
nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 1;
* @param value The bytes of the nodeLabels to add.
* @return This builder for chaining.
*/
public Builder addNodeLabelsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureNodeLabelsIsMutable();
nodeLabels_.add(value);
onChanged();
return this;
}
private java.lang.Object subClusterId_ = "";
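// subClusterId_ holds either a String or a ByteString; the accessors below
// convert on demand (caching the String form only when the bytes are valid
// UTF-8), which is protobuf-java's standard handling of string fields.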
/**
* optional string sub_cluster_id = 2;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string sub_cluster_id = 2;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 2;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 2;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 2;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 2;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RemoveFromClusterNodeLabelsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RemoveFromClusterNodeLabelsRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RemoveFromClusterNodeLabelsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RemoveFromClusterNodeLabelsRequestProto>() {
@java.lang.Override
public RemoveFromClusterNodeLabelsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RemoveFromClusterNodeLabelsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RemoveFromClusterNodeLabelsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
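// Usage sketch (illustrative only, not part of the generated source): the
// message above follows the standard protobuf-java builder/parser contract,
// so a round trip looks like the following. The label and sub-cluster
// values are made-up placeholders.
//
//   RemoveFromClusterNodeLabelsRequestProto request =
//       RemoveFromClusterNodeLabelsRequestProto.newBuilder()
//           .addNodeLabels("gpu")             // repeated string, field 1
//           .setSubClusterId("subcluster-0")  // optional string, field 2
//           .build();
//   byte[] wire = request.toByteArray();
//   RemoveFromClusterNodeLabelsRequestProto parsed =
//       RemoveFromClusterNodeLabelsRequestProto.parseFrom(wire);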
public interface RemoveFromClusterNodeLabelsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RemoveFromClusterNodeLabelsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RemoveFromClusterNodeLabelsResponseProto}
*/
public static final class RemoveFromClusterNodeLabelsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RemoveFromClusterNodeLabelsResponseProto)
RemoveFromClusterNodeLabelsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RemoveFromClusterNodeLabelsResponseProto.newBuilder() to construct.
private RemoveFromClusterNodeLabelsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RemoveFromClusterNodeLabelsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RemoveFromClusterNodeLabelsResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RemoveFromClusterNodeLabelsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RemoveFromClusterNodeLabelsResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RemoveFromClusterNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RemoveFromClusterNodeLabelsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RemoveFromClusterNodeLabelsResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RemoveFromClusterNodeLabelsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RemoveFromClusterNodeLabelsResponseProto>() {
@java.lang.Override
public RemoveFromClusterNodeLabelsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RemoveFromClusterNodeLabelsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RemoveFromClusterNodeLabelsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
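// Usage sketch (illustrative only): this response message declares no
// fields, so a round trip only exercises framing. With length-delimited
// framing on streams (out and in are hypothetical OutputStream/InputStream
// instances):
//
//   RemoveFromClusterNodeLabelsResponseProto.getDefaultInstance()
//       .writeDelimitedTo(out);
//   RemoveFromClusterNodeLabelsResponseProto resp =
//       RemoveFromClusterNodeLabelsResponseProto.parseDelimitedFrom(in);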
public interface ReplaceLabelsOnNodeRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ReplaceLabelsOnNodeRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto>
getNodeToLabelsList();
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index);
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
int getNodeToLabelsCount();
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder>
getNodeToLabelsOrBuilderList();
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
int index);
/**
* optional bool failOnUnknownNodes = 2;
* @return Whether the failOnUnknownNodes field is set.
*/
boolean hasFailOnUnknownNodes();
/**
* optional bool failOnUnknownNodes = 2;
* @return The failOnUnknownNodes.
*/
boolean getFailOnUnknownNodes();
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
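// Usage sketch (illustrative only): a request is assembled through the
// builder generated below; nodeIdToLabels stands in for a
// YarnProtos.NodeIdToLabelsProto built elsewhere, and the sub-cluster id is
// a made-up placeholder.
//
//   ReplaceLabelsOnNodeRequestProto req =
//       ReplaceLabelsOnNodeRequestProto.newBuilder()
//           .addNodeToLabels(nodeIdToLabels)  // repeated message, field 1
//           .setFailOnUnknownNodes(true)      // optional bool, field 2
//           .setSubClusterId("subcluster-0")  // optional string, field 3
//           .build();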
/**
* Protobuf type {@code hadoop.yarn.ReplaceLabelsOnNodeRequestProto}
*/
public static final class ReplaceLabelsOnNodeRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ReplaceLabelsOnNodeRequestProto)
ReplaceLabelsOnNodeRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReplaceLabelsOnNodeRequestProto.newBuilder() to construct.
private ReplaceLabelsOnNodeRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ReplaceLabelsOnNodeRequestProto() {
nodeToLabels_ = java.util.Collections.emptyList();
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ReplaceLabelsOnNodeRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto.Builder.class);
}
private int bitField0_;
public static final int NODETOLABELS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> nodeToLabels_;
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> getNodeToLabelsList() {
return nodeToLabels_;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder>
getNodeToLabelsOrBuilderList() {
return nodeToLabels_;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
@java.lang.Override
public int getNodeToLabelsCount() {
return nodeToLabels_.size();
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index) {
return nodeToLabels_.get(index);
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
int index) {
return nodeToLabels_.get(index);
}
public static final int FAILONUNKNOWNNODES_FIELD_NUMBER = 2;
private boolean failOnUnknownNodes_ = false;
/**
* optional bool failOnUnknownNodes = 2;
* @return Whether the failOnUnknownNodes field is set.
*/
@java.lang.Override
public boolean hasFailOnUnknownNodes() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bool failOnUnknownNodes = 2;
* @return The failOnUnknownNodes.
*/
@java.lang.Override
public boolean getFailOnUnknownNodes() {
return failOnUnknownNodes_;
}
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < nodeToLabels_.size(); i++) {
output.writeMessage(1, nodeToLabels_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeBool(2, failOnUnknownNodes_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < nodeToLabels_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, nodeToLabels_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(2, failOnUnknownNodes_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto) obj;
if (!getNodeToLabelsList()
.equals(other.getNodeToLabelsList())) return false;
if (hasFailOnUnknownNodes() != other.hasFailOnUnknownNodes()) return false;
if (hasFailOnUnknownNodes()) {
if (getFailOnUnknownNodes()
!= other.getFailOnUnknownNodes()) return false;
}
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getNodeToLabelsCount() > 0) {
hash = (37 * hash) + NODETOLABELS_FIELD_NUMBER;
hash = (53 * hash) + getNodeToLabelsList().hashCode();
}
if (hasFailOnUnknownNodes()) {
hash = (37 * hash) + FAILONUNKNOWNNODES_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getFailOnUnknownNodes());
}
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ReplaceLabelsOnNodeRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ReplaceLabelsOnNodeRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (nodeToLabelsBuilder_ == null) {
nodeToLabels_ = java.util.Collections.emptyList();
} else {
nodeToLabels_ = null;
nodeToLabelsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
failOnUnknownNodes_ = false;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto result) {
if (nodeToLabelsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
nodeToLabels_ = java.util.Collections.unmodifiableList(nodeToLabels_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.nodeToLabels_ = nodeToLabels_;
} else {
result.nodeToLabels_ = nodeToLabelsBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.failOnUnknownNodes_ = failOnUnknownNodes_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto.getDefaultInstance()) return this;
if (nodeToLabelsBuilder_ == null) {
if (!other.nodeToLabels_.isEmpty()) {
if (nodeToLabels_.isEmpty()) {
nodeToLabels_ = other.nodeToLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNodeToLabelsIsMutable();
nodeToLabels_.addAll(other.nodeToLabels_);
}
onChanged();
}
} else {
if (!other.nodeToLabels_.isEmpty()) {
if (nodeToLabelsBuilder_.isEmpty()) {
nodeToLabelsBuilder_.dispose();
nodeToLabelsBuilder_ = null;
nodeToLabels_ = other.nodeToLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
nodeToLabelsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodeToLabelsFieldBuilder() : null;
} else {
nodeToLabelsBuilder_.addAllMessages(other.nodeToLabels_);
}
}
}
if (other.hasFailOnUnknownNodes()) {
setFailOnUnknownNodes(other.getFailOnUnknownNodes());
}
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
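// Tag decoding as above: nodeToLabels (field 1, length-delimited) -> 10,
// failOnUnknownNodes (field 2, varint) -> 16, sub_cluster_id (field 3,
// length-delimited) -> 26.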
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.PARSER,
extensionRegistry);
if (nodeToLabelsBuilder_ == null) {
ensureNodeToLabelsIsMutable();
nodeToLabels_.add(m);
} else {
nodeToLabelsBuilder_.addMessage(m);
}
break;
} // case 10
case 16: {
failOnUnknownNodes_ = input.readBool();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
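// In this builder, bit 0x00000001 marks nodeToLabels_ as privately mutable,
// 0x00000002 is the has-bit for failOnUnknownNodes, and 0x00000004 the
// has-bit for sub_cluster_id.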
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> nodeToLabels_ =
java.util.Collections.emptyList();
private void ensureNodeToLabelsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
nodeToLabels_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto>(nodeToLabels_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> nodeToLabelsBuilder_;
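// The repeated nodeToLabels field keeps two alternative representations:
// the plain list above while no sub-builders exist, and nodeToLabelsBuilder_
// (a RepeatedFieldBuilderV3) once getNodeToLabelsFieldBuilder() has been
// called; every accessor below branches on which one is active.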
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> getNodeToLabelsList() {
if (nodeToLabelsBuilder_ == null) {
return java.util.Collections.unmodifiableList(nodeToLabels_);
} else {
return nodeToLabelsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public int getNodeToLabelsCount() {
if (nodeToLabelsBuilder_ == null) {
return nodeToLabels_.size();
} else {
return nodeToLabelsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index) {
if (nodeToLabelsBuilder_ == null) {
return nodeToLabels_.get(index);
} else {
return nodeToLabelsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder setNodeToLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) {
if (nodeToLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeToLabelsIsMutable();
nodeToLabels_.set(index, value);
onChanged();
} else {
nodeToLabelsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder setNodeToLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) {
if (nodeToLabelsBuilder_ == null) {
ensureNodeToLabelsIsMutable();
nodeToLabels_.set(index, builderForValue.build());
onChanged();
} else {
nodeToLabelsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder addNodeToLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) {
if (nodeToLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeToLabelsIsMutable();
nodeToLabels_.add(value);
onChanged();
} else {
nodeToLabelsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder addNodeToLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) {
if (nodeToLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeToLabelsIsMutable();
nodeToLabels_.add(index, value);
onChanged();
} else {
nodeToLabelsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder addNodeToLabels(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) {
if (nodeToLabelsBuilder_ == null) {
ensureNodeToLabelsIsMutable();
nodeToLabels_.add(builderForValue.build());
onChanged();
} else {
nodeToLabelsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder addNodeToLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) {
if (nodeToLabelsBuilder_ == null) {
ensureNodeToLabelsIsMutable();
nodeToLabels_.add(index, builderForValue.build());
onChanged();
} else {
nodeToLabelsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder addAllNodeToLabels(
java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto> values) {
if (nodeToLabelsBuilder_ == null) {
ensureNodeToLabelsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeToLabels_);
onChanged();
} else {
nodeToLabelsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder clearNodeToLabels() {
if (nodeToLabelsBuilder_ == null) {
nodeToLabels_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
nodeToLabelsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public Builder removeNodeToLabels(int index) {
if (nodeToLabelsBuilder_ == null) {
ensureNodeToLabelsIsMutable();
nodeToLabels_.remove(index);
onChanged();
} else {
nodeToLabelsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder getNodeToLabelsBuilder(
int index) {
return getNodeToLabelsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
int index) {
if (nodeToLabelsBuilder_ == null) {
return nodeToLabels_.get(index); } else {
return nodeToLabelsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder>
getNodeToLabelsOrBuilderList() {
if (nodeToLabelsBuilder_ != null) {
return nodeToLabelsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodeToLabels_);
}
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder addNodeToLabelsBuilder() {
return getNodeToLabelsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder addNodeToLabelsBuilder(
int index) {
return getNodeToLabelsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder>
getNodeToLabelsBuilderList() {
return getNodeToLabelsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder>
getNodeToLabelsFieldBuilder() {
if (nodeToLabelsBuilder_ == null) {
nodeToLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder>(
nodeToLabels_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
nodeToLabels_ = null;
}
return nodeToLabelsBuilder_;
}
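// The repeated field lives in exactly one of two places: the plain
// nodeToLabels_ list until a builder view is first requested, or the
// RepeatedFieldBuilderV3 afterwards. getNodeToLabelsFieldBuilder() above
// performs the one-way hand-off, copying the list into the field builder
// and nulling the list reference, which is why every accessor in this
// builder branches on nodeToLabelsBuilder_ == null.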
private boolean failOnUnknownNodes_ ;
/**
* optional bool failOnUnknownNodes = 2;
* @return Whether the failOnUnknownNodes field is set.
*/
@java.lang.Override
public boolean hasFailOnUnknownNodes() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bool failOnUnknownNodes = 2;
* @return The failOnUnknownNodes.
*/
@java.lang.Override
public boolean getFailOnUnknownNodes() {
return failOnUnknownNodes_;
}
/**
* optional bool failOnUnknownNodes = 2;
* @param value The failOnUnknownNodes to set.
* @return This builder for chaining.
*/
public Builder setFailOnUnknownNodes(boolean value) {
failOnUnknownNodes_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional bool failOnUnknownNodes = 2;
* @return This builder for chaining.
*/
public Builder clearFailOnUnknownNodes() {
bitField0_ = (bitField0_ & ~0x00000002);
failOnUnknownNodes_ = false;
onChanged();
return this;
}
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 3;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string sub_cluster_id = 3;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 3;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
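// subClusterId_ holds either a String or a ByteString. The two getters
// above convert lazily in each direction and cache the converted form
// (the String is only cached back when the bytes decode as valid UTF-8),
// so repeated reads do not re-encode or re-decode.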
/**
* optional string sub_cluster_id = 3;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 3;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 3;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ReplaceLabelsOnNodeRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ReplaceLabelsOnNodeRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReplaceLabelsOnNodeRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReplaceLabelsOnNodeRequestProto>() {
@java.lang.Override
public ReplaceLabelsOnNodeRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<ReplaceLabelsOnNodeRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ReplaceLabelsOnNodeRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
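// A minimal usage sketch for ReplaceLabelsOnNodeRequestProto (illustrative
// only; the NodeIdToLabelsProto contents are elided because its setters
// live in YarnProtos, not in this file, and parseFrom/toByteArray are the
// standard members protoc generates for every message):
//
//   org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto mapping =
//       org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto
//           .newBuilder()
//           // ... populate the node id and its labels here ...
//           .build();
//   ReplaceLabelsOnNodeRequestProto request =
//       ReplaceLabelsOnNodeRequestProto.newBuilder()
//           .addNodeToLabels(mapping)
//           .setFailOnUnknownNodes(true)
//           .build();
//   byte[] wire = request.toByteArray();
//   ReplaceLabelsOnNodeRequestProto parsed =
//       ReplaceLabelsOnNodeRequestProto.parseFrom(wire);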
public interface ReplaceLabelsOnNodeResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ReplaceLabelsOnNodeResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.ReplaceLabelsOnNodeResponseProto}
*/
public static final class ReplaceLabelsOnNodeResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ReplaceLabelsOnNodeResponseProto)
ReplaceLabelsOnNodeResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReplaceLabelsOnNodeResponseProto.newBuilder() to construct.
private ReplaceLabelsOnNodeResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ReplaceLabelsOnNodeResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ReplaceLabelsOnNodeResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ReplaceLabelsOnNodeResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ReplaceLabelsOnNodeResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_ReplaceLabelsOnNodeResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ReplaceLabelsOnNodeResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ReplaceLabelsOnNodeResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReplaceLabelsOnNodeResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReplaceLabelsOnNodeResponseProto>() {
@java.lang.Override
public ReplaceLabelsOnNodeResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<ReplaceLabelsOnNodeResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ReplaceLabelsOnNodeResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
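// ReplaceLabelsOnNodeResponseProto declares no fields of its own, so a
// default instance serializes to zero bytes and round-trips trivially; a
// sketch (assumes nothing beyond the generated API above):
//
//   ReplaceLabelsOnNodeResponseProto resp =
//       ReplaceLabelsOnNodeResponseProto.getDefaultInstance();
//   assert resp.getSerializedSize() == 0;
//   ReplaceLabelsOnNodeResponseProto back =
//       ReplaceLabelsOnNodeResponseProto.parseFrom(resp.toByteArray());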
public interface UpdateNodeLabelsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateNodeLabelsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.UpdateNodeLabelsResponseProto}
*/
public static final class UpdateNodeLabelsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateNodeLabelsResponseProto)
UpdateNodeLabelsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateNodeLabelsResponseProto.newBuilder() to construct.
private UpdateNodeLabelsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateNodeLabelsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UpdateNodeLabelsResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeLabelsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UpdateNodeLabelsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateNodeLabelsResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeLabelsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_UpdateNodeLabelsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateNodeLabelsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateNodeLabelsResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeLabelsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateNodeLabelsResponseProto>() {
@java.lang.Override
public UpdateNodeLabelsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeLabelsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateNodeLabelsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface CheckForDecommissioningNodesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.CheckForDecommissioningNodesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.CheckForDecommissioningNodesRequestProto}
*/
public static final class CheckForDecommissioningNodesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.CheckForDecommissioningNodesRequestProto)
CheckForDecommissioningNodesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use CheckForDecommissioningNodesRequestProto.newBuilder() to construct.
private CheckForDecommissioningNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CheckForDecommissioningNodesRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CheckForDecommissioningNodesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.CheckForDecommissioningNodesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.CheckForDecommissioningNodesRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.CheckForDecommissioningNodesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.CheckForDecommissioningNodesRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<CheckForDecommissioningNodesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<CheckForDecommissioningNodesRequestProto>() {
@java.lang.Override
public CheckForDecommissioningNodesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<CheckForDecommissioningNodesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<CheckForDecommissioningNodesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
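// Editor's note: a minimal usage sketch, not part of the generated source. It shows the
// usual builder/serialize/parse round trip for CheckForDecommissioningNodesRequestProto;
// the sub-cluster id "SC-1" is a made-up example value.
//
//   CheckForDecommissioningNodesRequestProto request =
//       CheckForDecommissioningNodesRequestProto.newBuilder()
//           .setSubClusterId("SC-1")   // optional field; may be omitted entirely
//           .build();
//   byte[] wire = request.toByteArray();
//   CheckForDecommissioningNodesRequestProto parsed =
//       CheckForDecommissioningNodesRequestProto.parseFrom(wire);
//   assert parsed.hasSubClusterId() && "SC-1".equals(parsed.getSubClusterId());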
public interface CheckForDecommissioningNodesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.CheckForDecommissioningNodesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto>
getDecommissioningNodesList();
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDecommissioningNodes(int index);
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
int getDecommissioningNodesCount();
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getDecommissioningNodesOrBuilderList();
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getDecommissioningNodesOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.CheckForDecommissioningNodesResponseProto}
*/
public static final class CheckForDecommissioningNodesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.CheckForDecommissioningNodesResponseProto)
CheckForDecommissioningNodesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use CheckForDecommissioningNodesResponseProto.newBuilder() to construct.
private CheckForDecommissioningNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CheckForDecommissioningNodesResponseProto() {
decommissioningNodes_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CheckForDecommissioningNodesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto.Builder.class);
}
public static final int DECOMMISSIONINGNODES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> decommissioningNodes_;
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> getDecommissioningNodesList() {
return decommissioningNodes_;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getDecommissioningNodesOrBuilderList() {
return decommissioningNodes_;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
@java.lang.Override
public int getDecommissioningNodesCount() {
return decommissioningNodes_.size();
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDecommissioningNodes(int index) {
return decommissioningNodes_.get(index);
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getDecommissioningNodesOrBuilder(
int index) {
return decommissioningNodes_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < decommissioningNodes_.size(); i++) {
output.writeMessage(1, decommissioningNodes_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < decommissioningNodes_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, decommissioningNodes_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto) obj;
if (!getDecommissioningNodesList()
.equals(other.getDecommissioningNodesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDecommissioningNodesCount() > 0) {
hash = (37 * hash) + DECOMMISSIONINGNODES_FIELD_NUMBER;
hash = (53 * hash) + getDecommissioningNodesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.CheckForDecommissioningNodesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.CheckForDecommissioningNodesResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (decommissioningNodesBuilder_ == null) {
decommissioningNodes_ = java.util.Collections.emptyList();
} else {
decommissioningNodes_ = null;
decommissioningNodesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_CheckForDecommissioningNodesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto result) {
if (decommissioningNodesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
decommissioningNodes_ = java.util.Collections.unmodifiableList(decommissioningNodes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.decommissioningNodes_ = decommissioningNodes_;
} else {
result.decommissioningNodes_ = decommissioningNodesBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto.getDefaultInstance()) return this;
if (decommissioningNodesBuilder_ == null) {
if (!other.decommissioningNodes_.isEmpty()) {
if (decommissioningNodes_.isEmpty()) {
decommissioningNodes_ = other.decommissioningNodes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.addAll(other.decommissioningNodes_);
}
onChanged();
}
} else {
if (!other.decommissioningNodes_.isEmpty()) {
if (decommissioningNodesBuilder_.isEmpty()) {
decommissioningNodesBuilder_.dispose();
decommissioningNodesBuilder_ = null;
decommissioningNodes_ = other.decommissioningNodes_;
bitField0_ = (bitField0_ & ~0x00000001);
decommissioningNodesBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getDecommissioningNodesFieldBuilder() : null;
} else {
decommissioningNodesBuilder_.addAllMessages(other.decommissioningNodes_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.PARSER,
extensionRegistry);
if (decommissioningNodesBuilder_ == null) {
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.add(m);
} else {
decommissioningNodesBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> decommissioningNodes_ =
java.util.Collections.emptyList();
private void ensureDecommissioningNodesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
decommissioningNodes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto>(decommissioningNodes_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> decommissioningNodesBuilder_;
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> getDecommissioningNodesList() {
if (decommissioningNodesBuilder_ == null) {
return java.util.Collections.unmodifiableList(decommissioningNodes_);
} else {
return decommissioningNodesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public int getDecommissioningNodesCount() {
if (decommissioningNodesBuilder_ == null) {
return decommissioningNodes_.size();
} else {
return decommissioningNodesBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDecommissioningNodes(int index) {
if (decommissioningNodesBuilder_ == null) {
return decommissioningNodes_.get(index);
} else {
return decommissioningNodesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder setDecommissioningNodes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (decommissioningNodesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.set(index, value);
onChanged();
} else {
decommissioningNodesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder setDecommissioningNodes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (decommissioningNodesBuilder_ == null) {
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.set(index, builderForValue.build());
onChanged();
} else {
decommissioningNodesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder addDecommissioningNodes(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (decommissioningNodesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.add(value);
onChanged();
} else {
decommissioningNodesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder addDecommissioningNodes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (decommissioningNodesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.add(index, value);
onChanged();
} else {
decommissioningNodesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder addDecommissioningNodes(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (decommissioningNodesBuilder_ == null) {
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.add(builderForValue.build());
onChanged();
} else {
decommissioningNodesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder addDecommissioningNodes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (decommissioningNodesBuilder_ == null) {
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.add(index, builderForValue.build());
onChanged();
} else {
decommissioningNodesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder addAllDecommissioningNodes(
java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> values) {
if (decommissioningNodesBuilder_ == null) {
ensureDecommissioningNodesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, decommissioningNodes_);
onChanged();
} else {
decommissioningNodesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder clearDecommissioningNodes() {
if (decommissioningNodesBuilder_ == null) {
decommissioningNodes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
decommissioningNodesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public Builder removeDecommissioningNodes(int index) {
if (decommissioningNodesBuilder_ == null) {
ensureDecommissioningNodesIsMutable();
decommissioningNodes_.remove(index);
onChanged();
} else {
decommissioningNodesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getDecommissioningNodesBuilder(
int index) {
return getDecommissioningNodesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getDecommissioningNodesOrBuilder(
int index) {
if (decommissioningNodesBuilder_ == null) {
return decommissioningNodes_.get(index); } else {
return decommissioningNodesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getDecommissioningNodesOrBuilderList() {
if (decommissioningNodesBuilder_ != null) {
return decommissioningNodesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(decommissioningNodes_);
}
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder addDecommissioningNodesBuilder() {
return getDecommissioningNodesFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder addDecommissioningNodesBuilder(
int index) {
return getDecommissioningNodesFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeIdProto decommissioningNodes = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder>
getDecommissioningNodesBuilderList() {
return getDecommissioningNodesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getDecommissioningNodesFieldBuilder() {
if (decommissioningNodesBuilder_ == null) {
decommissioningNodesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
decommissioningNodes_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
decommissioningNodes_ = null;
}
return decommissioningNodesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.CheckForDecommissioningNodesResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.CheckForDecommissioningNodesResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<CheckForDecommissioningNodesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<CheckForDecommissioningNodesResponseProto>() {
@java.lang.Override
public CheckForDecommissioningNodesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<CheckForDecommissioningNodesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<CheckForDecommissioningNodesResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
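// Editor's note: a minimal sketch, not part of the generated source, of populating and
// reading the repeated decommissioningNodes field. The host/port values are invented,
// and the sketch assumes hadoop.yarn.NodeIdProto carries host and port fields as
// declared in yarn_protos.proto.
//
//   CheckForDecommissioningNodesResponseProto response =
//       CheckForDecommissioningNodesResponseProto.newBuilder()
//           .addDecommissioningNodes(
//               org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder()
//                   .setHost("node-1.example.com")
//                   .setPort(8041))
//           .build();
//   for (org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto node
//       : response.getDecommissioningNodesList()) {
//     System.out.println(node.getHost() + ":" + node.getPort());
//   }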
public interface RefreshClusterMaxPriorityRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshClusterMaxPriorityRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RefreshClusterMaxPriorityRequestProto}
*/
public static final class RefreshClusterMaxPriorityRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshClusterMaxPriorityRequestProto)
RefreshClusterMaxPriorityRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshClusterMaxPriorityRequestProto.newBuilder() to construct.
private RefreshClusterMaxPriorityRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshClusterMaxPriorityRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshClusterMaxPriorityRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshClusterMaxPriorityRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshClusterMaxPriorityRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
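// Note on the parse loop above: a protobuf wire tag packs
// (field_number << 3) | wire_type, so "case 10" selects field 1
// (sub_cluster_id) with wire type 2 (length-delimited): (1 << 3) | 2 == 10.
// A tag of 0 is never produced by a writer, which is why "case 0" is
// treated as end-of-input.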
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string sub_cluster_id = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshClusterMaxPriorityRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshClusterMaxPriorityRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshClusterMaxPriorityRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshClusterMaxPriorityRequestProto>() {
@java.lang.Override
public RefreshClusterMaxPriorityRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshClusterMaxPriorityRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshClusterMaxPriorityRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
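// Illustrative only (not part of the generated code): a minimal sketch of
// the intended builder/serialize/parse round trip for the request message
// above; the local names and the "SC-1" value are invented for the example.
//
//   RefreshClusterMaxPriorityRequestProto request =
//       RefreshClusterMaxPriorityRequestProto.newBuilder()
//           .setSubClusterId("SC-1")
//           .build();
//   byte[] wire = request.toByteArray();
//   RefreshClusterMaxPriorityRequestProto parsed =
//       RefreshClusterMaxPriorityRequestProto.parseFrom(wire);
//   assert "SC-1".equals(parsed.getSubClusterId());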
public interface RefreshClusterMaxPriorityResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RefreshClusterMaxPriorityResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.RefreshClusterMaxPriorityResponseProto}
*/
public static final class RefreshClusterMaxPriorityResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RefreshClusterMaxPriorityResponseProto)
RefreshClusterMaxPriorityResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RefreshClusterMaxPriorityResponseProto.newBuilder() to construct.
private RefreshClusterMaxPriorityResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RefreshClusterMaxPriorityResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RefreshClusterMaxPriorityResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RefreshClusterMaxPriorityResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RefreshClusterMaxPriorityResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_RefreshClusterMaxPriorityResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RefreshClusterMaxPriorityResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RefreshClusterMaxPriorityResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshClusterMaxPriorityResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshClusterMaxPriorityResponseProto>() {
@java.lang.Override
public RefreshClusterMaxPriorityResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshClusterMaxPriorityResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshClusterMaxPriorityResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
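// RefreshClusterMaxPriorityResponseProto declares no fields: it exists so
// the corresponding admin RPC has a concrete, forward-extensible return
// type. A default instance therefore serializes to zero bytes; only unknown
// fields carried over from a remote peer would ever be written by
// writeTo(...), e.g. getDefaultInstance().toByteArray().length == 0.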
public interface NodeIdToLabelsNameProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeIdToLabelsNameProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
* @return Whether the nodeId field is set.
*/
boolean hasNodeId();
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
* @return The nodeId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();
/**
* repeated string nodeLabels = 2;
* @return A list containing the nodeLabels.
*/
java.util.List<java.lang.String>
getNodeLabelsList();
/**
* repeated string nodeLabels = 2;
* @return The count of nodeLabels.
*/
int getNodeLabelsCount();
/**
* repeated string nodeLabels = 2;
* @param index The index of the element to return.
* @return The nodeLabels at the given index.
*/
java.lang.String getNodeLabels(int index);
/**
* repeated string nodeLabels = 2;
* @param index The index of the value to return.
* @return The bytes of the nodeLabels at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelsBytes(int index);
}
/**
* Protobuf type {@code hadoop.yarn.NodeIdToLabelsNameProto}
*/
public static final class NodeIdToLabelsNameProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodeIdToLabelsNameProto)
NodeIdToLabelsNameProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeIdToLabelsNameProto.newBuilder() to construct.
private NodeIdToLabelsNameProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private NodeIdToLabelsNameProto() {
nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NodeIdToLabelsNameProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodeIdToLabelsNameProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodeIdToLabelsNameProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto.Builder.class);
}
private int bitField0_;
public static final int NODEID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
* @return Whether the nodeId field is set.
*/
@java.lang.Override
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
* @return The nodeId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
public static final int NODELABELS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_;
/**
* repeated string nodeLabels = 2;
* @return A list containing the nodeLabels.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getNodeLabelsList() {
return nodeLabels_;
}
/**
* repeated string nodeLabels = 2;
* @return The count of nodeLabels.
*/
public int getNodeLabelsCount() {
return nodeLabels_.size();
}
/**
* repeated string nodeLabels = 2;
* @param index The index of the element to return.
* @return The nodeLabels at the given index.
*/
public java.lang.String getNodeLabels(int index) {
return nodeLabels_.get(index);
}
/**
* repeated string nodeLabels = 2;
* @param index The index of the value to return.
* @return The bytes of the nodeLabels at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelsBytes(int index) {
return nodeLabels_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getNodeId());
}
for (int i = 0; i < nodeLabels_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, nodeLabels_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getNodeId());
}
{
int dataSize = 0;
for (int i = 0; i < nodeLabels_.size(); i++) {
dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i));
}
size += dataSize;
size += 1 * getNodeLabelsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto) obj;
if (hasNodeId() != other.hasNodeId()) return false;
if (hasNodeId()) {
if (!getNodeId()
.equals(other.getNodeId())) return false;
}
if (!getNodeLabelsList()
.equals(other.getNodeLabelsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNodeId()) {
hash = (37 * hash) + NODEID_FIELD_NUMBER;
hash = (53 * hash) + getNodeId().hashCode();
}
if (getNodeLabelsCount() > 0) {
hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabelsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodeIdToLabelsNameProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeIdToLabelsNameProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodeIdToLabelsNameProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodeIdToLabelsNameProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getNodeIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodeIdToLabelsNameProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto result) {
if (((bitField0_ & 0x00000002) != 0)) {
nodeLabels_ = nodeLabels_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000002);
}
result.nodeLabels_ = nodeLabels_;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.nodeId_ = nodeIdBuilder_ == null
? nodeId_
: nodeIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto.getDefaultInstance()) return this;
if (other.hasNodeId()) {
mergeNodeId(other.getNodeId());
}
if (!other.nodeLabels_.isEmpty()) {
if (nodeLabels_.isEmpty()) {
nodeLabels_ = other.nodeLabels_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureNodeLabelsIsMutable();
nodeLabels_.addAll(other.nodeLabels_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getNodeIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureNodeLabelsIsMutable();
nodeLabels_.add(bs);
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
* @return Whether the nodeId field is set.
*/
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
* @return The nodeId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
if (nodeIdBuilder_ == null) {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
} else {
return nodeIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeId_ = value;
} else {
nodeIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
public Builder setNodeId(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (nodeIdBuilder_ == null) {
nodeId_ = builderForValue.build();
} else {
nodeIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
nodeId_ != null &&
nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
getNodeIdBuilder().mergeFrom(value);
} else {
nodeId_ = value;
}
} else {
nodeIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
public Builder clearNodeId() {
bitField0_ = (bitField0_ & ~0x00000001);
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getNodeIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
if (nodeIdBuilder_ != null) {
return nodeIdBuilder_.getMessageOrBuilder();
} else {
return nodeId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
}
/**
* optional .hadoop.yarn.NodeIdProto nodeId = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getNodeIdFieldBuilder() {
if (nodeIdBuilder_ == null) {
nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
getNodeId(),
getParentForChildren(),
isClean());
nodeId_ = null;
}
return nodeIdBuilder_;
}
private org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
private void ensureNodeLabelsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_);
bitField0_ |= 0x00000002;
}
}
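// ensureNodeLabelsIsMutable() above is the generated copy-on-write guard:
// bit 0x00000002 of bitField0_ records whether this builder already owns a
// private LazyStringArrayList. Until a mutation occurs the builder may share
// an immutable list (for example one adopted from another message in
// mergeFrom), so the first mutator call copies it before writing.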
/**
* repeated string nodeLabels = 2;
* @return A list containing the nodeLabels.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getNodeLabelsList() {
return nodeLabels_.getUnmodifiableView();
}
/**
* repeated string nodeLabels = 2;
* @return The count of nodeLabels.
*/
public int getNodeLabelsCount() {
return nodeLabels_.size();
}
/**
* repeated string nodeLabels = 2;
* @param index The index of the element to return.
* @return The nodeLabels at the given index.
*/
public java.lang.String getNodeLabels(int index) {
return nodeLabels_.get(index);
}
/**
* repeated string nodeLabels = 2;
* @param index The index of the value to return.
* @return The bytes of the nodeLabels at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelsBytes(int index) {
return nodeLabels_.getByteString(index);
}
/**
* repeated string nodeLabels = 2;
* @param index The index to set the value at.
* @param value The nodeLabels to set.
* @return This builder for chaining.
*/
public Builder setNodeLabels(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureNodeLabelsIsMutable();
nodeLabels_.set(index, value);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 2;
* @param value The nodeLabels to add.
* @return This builder for chaining.
*/
public Builder addNodeLabels(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureNodeLabelsIsMutable();
nodeLabels_.add(value);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 2;
* @param values The nodeLabels to add.
* @return This builder for chaining.
*/
public Builder addAllNodeLabels(
java.lang.Iterable<java.lang.String> values) {
ensureNodeLabelsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeLabels_);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 2;
* @return This builder for chaining.
*/
public Builder clearNodeLabels() {
nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* repeated string nodeLabels = 2;
* @param value The bytes of the nodeLabels to add.
* @return This builder for chaining.
*/
public Builder addNodeLabelsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureNodeLabelsIsMutable();
nodeLabels_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeIdToLabelsNameProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeIdToLabelsNameProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdToLabelsNameProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeIdToLabelsNameProto>() {
@java.lang.Override
public NodeIdToLabelsNameProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdToLabelsNameProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdToLabelsNameProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
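// Illustrative only (a hedged sketch; the host, port, and label values are
// invented for the example): composing the nested NodeIdProto builder with
// the repeated-string accessors of the message above.
//
//   NodeIdToLabelsNameProto mapping =
//       NodeIdToLabelsNameProto.newBuilder()
//           .setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto
//               .newBuilder().setHost("worker-1").setPort(8041))
//           .addNodeLabels("gpu")
//           .addNodeLabels("ssd")
//           .build();
//   assert mapping.getNodeLabelsCount() == 2;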
public interface NodesToAttributesMappingRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodesToAttributesMappingRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @return Whether the operation field is set.
*/
boolean hasOperation();
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @return The operation.
*/
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto getOperation();
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto>
getNodeToAttributesList();
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodeToAttributes(int index);
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
int getNodeToAttributesCount();
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder>
getNodeToAttributesOrBuilderList();
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodeToAttributesOrBuilder(
int index);
/**
* optional bool failOnUnknownNodes = 3;
* @return Whether the failOnUnknownNodes field is set.
*/
boolean hasFailOnUnknownNodes();
/**
* optional bool failOnUnknownNodes = 3;
* @return The failOnUnknownNodes.
*/
boolean getFailOnUnknownNodes();
/**
* optional string sub_cluster_id = 4;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string sub_cluster_id = 4;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string sub_cluster_id = 4;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.NodesToAttributesMappingRequestProto}
*/
public static final class NodesToAttributesMappingRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodesToAttributesMappingRequestProto)
NodesToAttributesMappingRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodesToAttributesMappingRequestProto.newBuilder() to construct.
private NodesToAttributesMappingRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private NodesToAttributesMappingRequestProto() {
operation_ = 1;
nodeToAttributes_ = java.util.Collections.emptyList();
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NodesToAttributesMappingRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto.Builder.class);
}
private int bitField0_;
public static final int OPERATION_FIELD_NUMBER = 1;
private int operation_ = 1;
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @return Whether the operation field is set.
*/
@java.lang.Override public boolean hasOperation() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @return The operation.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto getOperation() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto result = org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto.forNumber(operation_);
return result == null ? org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto.REPLACE : result;
}
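// Note: because `operation` is declared with [default = REPLACE], the has-bit
// and the getter are independent: on a message where the field was never set,
// hasOperation() is false yet getOperation() still returns REPLACE (stored as
// wire number 1). Illustrative check:
//
//   NodesToAttributesMappingRequestProto empty =
//       NodesToAttributesMappingRequestProto.getDefaultInstance();
//   // empty.hasOperation()  -> false
//   // empty.getOperation()  -> AttributeMappingOperationTypeProto.REPLACE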
public static final int NODETOATTRIBUTES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> nodeToAttributes_;
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> getNodeToAttributesList() {
return nodeToAttributes_;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder>
getNodeToAttributesOrBuilderList() {
return nodeToAttributes_;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
@java.lang.Override
public int getNodeToAttributesCount() {
return nodeToAttributes_.size();
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodeToAttributes(int index) {
return nodeToAttributes_.get(index);
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodeToAttributesOrBuilder(
int index) {
return nodeToAttributes_.get(index);
}
public static final int FAILONUNKNOWNNODES_FIELD_NUMBER = 3;
private boolean failOnUnknownNodes_ = false;
/**
* optional bool failOnUnknownNodes = 3;
* @return Whether the failOnUnknownNodes field is set.
*/
@java.lang.Override
public boolean hasFailOnUnknownNodes() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bool failOnUnknownNodes = 3;
* @return The failOnUnknownNodes.
*/
@java.lang.Override
public boolean getFailOnUnknownNodes() {
return failOnUnknownNodes_;
}
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 4;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string sub_cluster_id = 4;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string sub_cluster_id = 4;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
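// Note: the two accessors above implement protobuf's lazy UTF-8 caching. The
// field is held as a plain Object that starts out as the ByteString produced
// by parsing; the first getSubClusterId() call decodes it and, if the bytes
// are valid UTF-8, caches the String in place so later calls skip the decode.
// Illustrative sketch (`msg` is a hypothetical parsed instance):
//
//   org.apache.hadoop.thirdparty.protobuf.ByteString raw =
//       msg.getSubClusterIdBytes();    // cheap while still stored as bytes
//   String id = msg.getSubClusterId(); // decodes once, then cached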
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getNodeToAttributesCount(); i++) {
if (!getNodeToAttributes(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, operation_);
}
for (int i = 0; i < nodeToAttributes_.size(); i++) {
output.writeMessage(2, nodeToAttributes_.get(i));
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeBool(3, failOnUnknownNodes_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, subClusterId_);
}
getUnknownFields().writeTo(output);
}
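// Note: the tags written above follow the protobuf rule
// tag = (field_number << 3) | wire_type, which is why the parser switch in
// Builder.mergeFrom(CodedInputStream, ...) matches on exactly these values:
//
//   operation          field 1, varint (0)           -> (1 << 3) | 0 = 8
//   nodeToAttributes   field 2, length-delimited (2) -> (2 << 3) | 2 = 18
//   failOnUnknownNodes field 3, varint (0)           -> (3 << 3) | 0 = 24
//   sub_cluster_id     field 4, length-delimited (2) -> (4 << 3) | 2 = 34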
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(1, operation_);
}
for (int i = 0; i < nodeToAttributes_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, nodeToAttributes_.get(i));
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(3, failOnUnknownNodes_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
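// Worked size example (illustrative): a request carrying only
// failOnUnknownNodes=true serializes to exactly two bytes, the tag byte 0x18
// (field 3, varint) followed by 0x01 (true), so:
//
//   int n = NodesToAttributesMappingRequestProto.newBuilder()
//       .setFailOnUnknownNodes(true)
//       .build()
//       .getSerializedSize();   // n == 2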
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto) obj;
if (hasOperation() != other.hasOperation()) return false;
if (hasOperation()) {
if (operation_ != other.operation_) return false;
}
if (!getNodeToAttributesList()
.equals(other.getNodeToAttributesList())) return false;
if (hasFailOnUnknownNodes() != other.hasFailOnUnknownNodes()) return false;
if (hasFailOnUnknownNodes()) {
if (getFailOnUnknownNodes()
!= other.getFailOnUnknownNodes()) return false;
}
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasOperation()) {
hash = (37 * hash) + OPERATION_FIELD_NUMBER;
hash = (53 * hash) + operation_;
}
if (getNodeToAttributesCount() > 0) {
hash = (37 * hash) + NODETOATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getNodeToAttributesList().hashCode();
}
if (hasFailOnUnknownNodes()) {
hash = (37 * hash) + FAILONUNKNOWNNODES_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getFailOnUnknownNodes());
}
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodesToAttributesMappingRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodesToAttributesMappingRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
operation_ = 1;
if (nodeToAttributesBuilder_ == null) {
nodeToAttributes_ = java.util.Collections.emptyList();
} else {
nodeToAttributes_ = null;
nodeToAttributesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
failOnUnknownNodes_ = false;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto result) {
if (nodeToAttributesBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
nodeToAttributes_ = java.util.Collections.unmodifiableList(nodeToAttributes_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.nodeToAttributes_ = nodeToAttributes_;
} else {
result.nodeToAttributes_ = nodeToAttributesBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.operation_ = operation_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.failOnUnknownNodes_ = failOnUnknownNodes_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto.getDefaultInstance()) return this;
if (other.hasOperation()) {
setOperation(other.getOperation());
}
if (nodeToAttributesBuilder_ == null) {
if (!other.nodeToAttributes_.isEmpty()) {
if (nodeToAttributes_.isEmpty()) {
nodeToAttributes_ = other.nodeToAttributes_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureNodeToAttributesIsMutable();
nodeToAttributes_.addAll(other.nodeToAttributes_);
}
onChanged();
}
} else {
if (!other.nodeToAttributes_.isEmpty()) {
if (nodeToAttributesBuilder_.isEmpty()) {
nodeToAttributesBuilder_.dispose();
nodeToAttributesBuilder_ = null;
nodeToAttributes_ = other.nodeToAttributes_;
bitField0_ = (bitField0_ & ~0x00000002);
nodeToAttributesBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodeToAttributesFieldBuilder() : null;
} else {
nodeToAttributesBuilder_.addAllMessages(other.nodeToAttributes_);
}
}
}
if (other.hasFailOnUnknownNodes()) {
setFailOnUnknownNodes(other.getFailOnUnknownNodes());
}
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getNodeToAttributesCount(); i++) {
if (!getNodeToAttributes(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto tmpValue =
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(1, tmpRaw);
} else {
operation_ = tmpRaw;
bitField0_ |= 0x00000001;
}
break;
} // case 8
case 18: {
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.PARSER,
extensionRegistry);
if (nodeToAttributesBuilder_ == null) {
ensureNodeToAttributesIsMutable();
nodeToAttributes_.add(m);
} else {
nodeToAttributesBuilder_.addMessage(m);
}
break;
} // case 18
case 24: {
failOnUnknownNodes_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 34
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
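// Note on the parse loop above: readTag() returning 0 signals clean end of
// input, recognized tags update fields and has-bits directly, and all other
// tags are routed to parseUnknownField(...) so unrecognized data survives a
// parse/serialize round trip. Illustrative streaming merge (`codedIn` and
// `registry` are hypothetical caller-supplied arguments):
//
//   NodesToAttributesMappingRequestProto p =
//       NodesToAttributesMappingRequestProto.newBuilder()
//           .mergeFrom(codedIn, registry)
//           .buildPartial();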
private int bitField0_;
private int operation_ = 1;
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @return Whether the operation field is set.
*/
@java.lang.Override public boolean hasOperation() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @return The operation.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto getOperation() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto result = org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto.forNumber(operation_);
return result == null ? org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto.REPLACE : result;
}
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @param value The operation to set.
* @return This builder for chaining.
*/
public Builder setOperation(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AttributeMappingOperationTypeProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
operation_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.yarn.AttributeMappingOperationTypeProto operation = 1 [default = REPLACE];
* @return This builder for chaining.
*/
public Builder clearOperation() {
bitField0_ = (bitField0_ & ~0x00000001);
operation_ = 1;
onChanged();
return this;
}
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> nodeToAttributes_ =
java.util.Collections.emptyList();
private void ensureNodeToAttributesIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
nodeToAttributes_ = new java.util.ArrayList<>(nodeToAttributes_);
bitField0_ |= 0x00000002;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> nodeToAttributesBuilder_;
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> getNodeToAttributesList() {
if (nodeToAttributesBuilder_ == null) {
return java.util.Collections.unmodifiableList(nodeToAttributes_);
} else {
return nodeToAttributesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public int getNodeToAttributesCount() {
if (nodeToAttributesBuilder_ == null) {
return nodeToAttributes_.size();
} else {
return nodeToAttributesBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodeToAttributes(int index) {
if (nodeToAttributesBuilder_ == null) {
return nodeToAttributes_.get(index);
} else {
return nodeToAttributesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder setNodeToAttributes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) {
if (nodeToAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeToAttributesIsMutable();
nodeToAttributes_.set(index, value);
onChanged();
} else {
nodeToAttributesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder setNodeToAttributes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) {
if (nodeToAttributesBuilder_ == null) {
ensureNodeToAttributesIsMutable();
nodeToAttributes_.set(index, builderForValue.build());
onChanged();
} else {
nodeToAttributesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder addNodeToAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) {
if (nodeToAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeToAttributesIsMutable();
nodeToAttributes_.add(value);
onChanged();
} else {
nodeToAttributesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder addNodeToAttributes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) {
if (nodeToAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeToAttributesIsMutable();
nodeToAttributes_.add(index, value);
onChanged();
} else {
nodeToAttributesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder addNodeToAttributes(
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) {
if (nodeToAttributesBuilder_ == null) {
ensureNodeToAttributesIsMutable();
nodeToAttributes_.add(builderForValue.build());
onChanged();
} else {
nodeToAttributesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder addNodeToAttributes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) {
if (nodeToAttributesBuilder_ == null) {
ensureNodeToAttributesIsMutable();
nodeToAttributes_.add(index, builderForValue.build());
onChanged();
} else {
nodeToAttributesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder addAllNodeToAttributes(
java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto> values) {
if (nodeToAttributesBuilder_ == null) {
ensureNodeToAttributesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeToAttributes_);
onChanged();
} else {
nodeToAttributesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder clearNodeToAttributes() {
if (nodeToAttributesBuilder_ == null) {
nodeToAttributes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
nodeToAttributesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public Builder removeNodeToAttributes(int index) {
if (nodeToAttributesBuilder_ == null) {
ensureNodeToAttributesIsMutable();
nodeToAttributes_.remove(index);
onChanged();
} else {
nodeToAttributesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder getNodeToAttributesBuilder(
int index) {
return getNodeToAttributesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodeToAttributesOrBuilder(
int index) {
if (nodeToAttributesBuilder_ == null) {
return nodeToAttributes_.get(index);
} else {
return nodeToAttributesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder>
getNodeToAttributesOrBuilderList() {
if (nodeToAttributesBuilder_ != null) {
return nodeToAttributesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodeToAttributes_);
}
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder addNodeToAttributesBuilder() {
return getNodeToAttributesFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder addNodeToAttributesBuilder(
int index) {
return getNodeToAttributesFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeToAttributesProto nodeToAttributes = 2;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder>
getNodeToAttributesBuilderList() {
return getNodeToAttributesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder>
getNodeToAttributesFieldBuilder() {
if (nodeToAttributesBuilder_ == null) {
nodeToAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder>(
nodeToAttributes_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
nodeToAttributes_ = null;
}
return nodeToAttributesBuilder_;
}
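// Note: the repeated field above runs in one of two modes. Until a
// builder-view accessor is used, elements live in a plain ArrayList guarded
// by ensureNodeToAttributesIsMutable(); the first call that needs per-element
// builders hands the list to the RepeatedFieldBuilderV3 and nulls the local
// reference. Illustrative sketch (host name hypothetical, `node` field
// assumed from yarn_protos.proto):
//
//   NodesToAttributesMappingRequestProto.Builder b =
//       NodesToAttributesMappingRequestProto.newBuilder();
//   b.addNodeToAttributesBuilder()        // switches to field-builder mode
//       .setNode("host2.example.com");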
private boolean failOnUnknownNodes_;
/**
* optional bool failOnUnknownNodes = 3;
* @return Whether the failOnUnknownNodes field is set.
*/
@java.lang.Override
public boolean hasFailOnUnknownNodes() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bool failOnUnknownNodes = 3;
* @return The failOnUnknownNodes.
*/
@java.lang.Override
public boolean getFailOnUnknownNodes() {
return failOnUnknownNodes_;
}
/**
* optional bool failOnUnknownNodes = 3;
* @param value The failOnUnknownNodes to set.
* @return This builder for chaining.
*/
public Builder setFailOnUnknownNodes(boolean value) {
failOnUnknownNodes_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional bool failOnUnknownNodes = 3;
* @return This builder for chaining.
*/
public Builder clearFailOnUnknownNodes() {
bitField0_ = (bitField0_ & ~0x00000004);
failOnUnknownNodes_ = false;
onChanged();
return this;
}
private java.lang.Object subClusterId_ = "";
/**
* optional string sub_cluster_id = 4;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string sub_cluster_id = 4;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string sub_cluster_id = 4;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string sub_cluster_id = 4;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 4;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
* optional string sub_cluster_id = 4;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
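// Note: setSubClusterIdBytes(...) stores the ByteString verbatim; unlike
// parsing, no UTF-8 validity check happens here, so malformed bytes would
// only surface when getSubClusterId() decodes them. Illustrative equivalent
// of setSubClusterId("SC-1") (`builder` is hypothetical):
//
//   builder.setSubClusterIdBytes(
//       org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("SC-1"));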
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodesToAttributesMappingRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodesToAttributesMappingRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodesToAttributesMappingRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodesToAttributesMappingRequestProto>() {
@java.lang.Override
public NodesToAttributesMappingRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodesToAttributesMappingRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NodesToAttributesMappingRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
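// ---------------------------------------------------------------------------
// Illustrative round trip for the message above (`request` is a previously
// built instance): serialization and parsing are inverses, and field-aware
// equals() confirms it:
//
//   byte[] bytes = request.toByteArray();
//   NodesToAttributesMappingRequestProto parsed =
//       NodesToAttributesMappingRequestProto.parseFrom(bytes);
//   // parsed.equals(request) -> true
// ---------------------------------------------------------------------------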
public interface NodesToAttributesMappingResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodesToAttributesMappingResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.NodesToAttributesMappingResponseProto}
*/
public static final class NodesToAttributesMappingResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodesToAttributesMappingResponseProto)
NodesToAttributesMappingResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodesToAttributesMappingResponseProto.newBuilder() to construct.
private NodesToAttributesMappingResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private NodesToAttributesMappingResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NodesToAttributesMappingResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodesToAttributesMappingResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodesToAttributesMappingResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_NodesToAttributesMappingResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodesToAttributesMappingResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodesToAttributesMappingResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodesToAttributesMappingResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodesToAttributesMappingResponseProto>() {
@java.lang.Override
public NodesToAttributesMappingResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodesToAttributesMappingResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NodesToAttributesMappingResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
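// ---------------------------------------------------------------------------
// Note: the response message above declares no fields, so its default
// instance serializes to zero bytes; only unknown fields retained from a
// parse contribute to the output. Illustrative check:
//
//   int n = NodesToAttributesMappingResponseProto.getDefaultInstance()
//       .getSerializedSize();   // n == 0
// ---------------------------------------------------------------------------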
public interface DeregisterSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeregisterSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string subClusterId = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional string subClusterId = 1;
* @return The subClusterId.
*/
java.lang.String getSubClusterId();
/**
* optional string subClusterId = 1;
* @return The bytes for subClusterId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.DeregisterSubClusterRequestProto}
*/
public static final class DeregisterSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeregisterSubClusterRequestProto)
DeregisterSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeregisterSubClusterRequestProto.newBuilder() to construct.
private DeregisterSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeregisterSubClusterRequestProto() {
subClusterId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeregisterSubClusterRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUBCLUSTERID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object subClusterId_ = "";
/**
* optional string subClusterId = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string subClusterId = 1;
* @return The subClusterId.
*/
@java.lang.Override
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
}
}
/**
* optional string subClusterId = 1;
* @return The bytes for subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeregisterSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeregisterSubClusterRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
subClusterId_ = other.subClusterId_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
subClusterId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object subClusterId_ = "";
/**
* optional string subClusterId = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string subClusterId = 1;
* @return The subClusterId.
*/
public java.lang.String getSubClusterId() {
java.lang.Object ref = subClusterId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
subClusterId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string subClusterId = 1;
* @return The bytes for subClusterId.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSubClusterIdBytes() {
java.lang.Object ref = subClusterId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
subClusterId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string subClusterId = 1;
* @param value The subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string subClusterId = 1;
* @return This builder for chaining.
*/
public Builder clearSubClusterId() {
subClusterId_ = getDefaultInstance().getSubClusterId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string subClusterId = 1;
* @param value The bytes for subClusterId to set.
* @return This builder for chaining.
*/
public Builder setSubClusterIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
subClusterId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeregisterSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeregisterSubClusterRequestProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeregisterSubClusterRequestProto>() {
@java.lang.Override
public DeregisterSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
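// ---------------------------------------------------------------------
// Usage sketch (illustrative, not part of the generated code): a typical
// round trip through the builder and parser API above. The sub-cluster id
// "SC-1" is a made-up example value.
//
//   DeregisterSubClusterRequestProto req =
//       DeregisterSubClusterRequestProto.newBuilder()
//           .setSubClusterId("SC-1")
//           .build();
//   byte[] wire = req.toByteArray();   // serialize to the protobuf wire format
//   DeregisterSubClusterRequestProto parsed =
//       DeregisterSubClusterRequestProto.parseFrom(wire);
//   assert parsed.hasSubClusterId() && "SC-1".equals(parsed.getSubClusterId());
//
// Because subClusterId is optional, build() also succeeds when the field is
// unset; hasSubClusterId() then reports false on the resulting message.
// ---------------------------------------------------------------------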
public interface DeregisterSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeregisterSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto>
getDeregisterSubClustersList();
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDeregisterSubClusters(int index);
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
int getDeregisterSubClustersCount();
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder>
getDeregisterSubClustersOrBuilderList();
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder getDeregisterSubClustersOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.DeregisterSubClusterResponseProto}
*/
public static final class DeregisterSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeregisterSubClusterResponseProto)
DeregisterSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeregisterSubClusterResponseProto.newBuilder() to construct.
private DeregisterSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeregisterSubClusterResponseProto() {
deregisterSubClusters_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeregisterSubClusterResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto.Builder.class);
}
public static final int DEREGISTERSUBCLUSTERS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto> deregisterSubClusters_;
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto> getDeregisterSubClustersList() {
return deregisterSubClusters_;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder>
getDeregisterSubClustersOrBuilderList() {
return deregisterSubClusters_;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
@java.lang.Override
public int getDeregisterSubClustersCount() {
return deregisterSubClusters_.size();
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDeregisterSubClusters(int index) {
return deregisterSubClusters_.get(index);
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder getDeregisterSubClustersOrBuilder(
int index) {
return deregisterSubClusters_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < deregisterSubClusters_.size(); i++) {
output.writeMessage(1, deregisterSubClusters_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < deregisterSubClusters_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, deregisterSubClusters_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto) obj;
if (!getDeregisterSubClustersList()
.equals(other.getDeregisterSubClustersList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDeregisterSubClustersCount() > 0) {
hash = (37 * hash) + DEREGISTERSUBCLUSTERS_FIELD_NUMBER;
hash = (53 * hash) + getDeregisterSubClustersList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeregisterSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeregisterSubClusterResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (deregisterSubClustersBuilder_ == null) {
deregisterSubClusters_ = java.util.Collections.emptyList();
} else {
deregisterSubClusters_ = null;
deregisterSubClustersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_DeregisterSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto result) {
if (deregisterSubClustersBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
deregisterSubClusters_ = java.util.Collections.unmodifiableList(deregisterSubClusters_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.deregisterSubClusters_ = deregisterSubClusters_;
} else {
result.deregisterSubClusters_ = deregisterSubClustersBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto.getDefaultInstance()) return this;
if (deregisterSubClustersBuilder_ == null) {
if (!other.deregisterSubClusters_.isEmpty()) {
if (deregisterSubClusters_.isEmpty()) {
deregisterSubClusters_ = other.deregisterSubClusters_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.addAll(other.deregisterSubClusters_);
}
onChanged();
}
} else {
if (!other.deregisterSubClusters_.isEmpty()) {
if (deregisterSubClustersBuilder_.isEmpty()) {
deregisterSubClustersBuilder_.dispose();
deregisterSubClustersBuilder_ = null;
deregisterSubClusters_ = other.deregisterSubClusters_;
bitField0_ = (bitField0_ & ~0x00000001);
deregisterSubClustersBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getDeregisterSubClustersFieldBuilder() : null;
} else {
deregisterSubClustersBuilder_.addAllMessages(other.deregisterSubClusters_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.PARSER,
extensionRegistry);
if (deregisterSubClustersBuilder_ == null) {
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.add(m);
} else {
deregisterSubClustersBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto> deregisterSubClusters_ =
java.util.Collections.emptyList();
private void ensureDeregisterSubClustersIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
deregisterSubClusters_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto>(deregisterSubClusters_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder> deregisterSubClustersBuilder_;
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto> getDeregisterSubClustersList() {
if (deregisterSubClustersBuilder_ == null) {
return java.util.Collections.unmodifiableList(deregisterSubClusters_);
} else {
return deregisterSubClustersBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public int getDeregisterSubClustersCount() {
if (deregisterSubClustersBuilder_ == null) {
return deregisterSubClusters_.size();
} else {
return deregisterSubClustersBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDeregisterSubClusters(int index) {
if (deregisterSubClustersBuilder_ == null) {
return deregisterSubClusters_.get(index);
} else {
return deregisterSubClustersBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder setDeregisterSubClusters(
int index, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto value) {
if (deregisterSubClustersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.set(index, value);
onChanged();
} else {
deregisterSubClustersBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder setDeregisterSubClusters(
int index, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder builderForValue) {
if (deregisterSubClustersBuilder_ == null) {
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.set(index, builderForValue.build());
onChanged();
} else {
deregisterSubClustersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder addDeregisterSubClusters(org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto value) {
if (deregisterSubClustersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.add(value);
onChanged();
} else {
deregisterSubClustersBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder addDeregisterSubClusters(
int index, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto value) {
if (deregisterSubClustersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.add(index, value);
onChanged();
} else {
deregisterSubClustersBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder addDeregisterSubClusters(
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder builderForValue) {
if (deregisterSubClustersBuilder_ == null) {
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.add(builderForValue.build());
onChanged();
} else {
deregisterSubClustersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder addDeregisterSubClusters(
int index, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder builderForValue) {
if (deregisterSubClustersBuilder_ == null) {
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.add(index, builderForValue.build());
onChanged();
} else {
deregisterSubClustersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder addAllDeregisterSubClusters(
java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto> values) {
if (deregisterSubClustersBuilder_ == null) {
ensureDeregisterSubClustersIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, deregisterSubClusters_);
onChanged();
} else {
deregisterSubClustersBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder clearDeregisterSubClusters() {
if (deregisterSubClustersBuilder_ == null) {
deregisterSubClusters_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
deregisterSubClustersBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public Builder removeDeregisterSubClusters(int index) {
if (deregisterSubClustersBuilder_ == null) {
ensureDeregisterSubClustersIsMutable();
deregisterSubClusters_.remove(index);
onChanged();
} else {
deregisterSubClustersBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder getDeregisterSubClustersBuilder(
int index) {
return getDeregisterSubClustersFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder getDeregisterSubClustersOrBuilder(
int index) {
if (deregisterSubClustersBuilder_ == null) {
return deregisterSubClusters_.get(index); } else {
return deregisterSubClustersBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder>
getDeregisterSubClustersOrBuilderList() {
if (deregisterSubClustersBuilder_ != null) {
return deregisterSubClustersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(deregisterSubClusters_);
}
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder addDeregisterSubClustersBuilder() {
return getDeregisterSubClustersFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder addDeregisterSubClustersBuilder(
int index) {
return getDeregisterSubClustersFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.DeregisterSubClustersProto deregisterSubClusters = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder>
getDeregisterSubClustersBuilderList() {
return getDeregisterSubClustersFieldBuilder().getBuilderList();
}
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder>
getDeregisterSubClustersFieldBuilder() {
if (deregisterSubClustersBuilder_ == null) {
deregisterSubClustersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder>(
deregisterSubClusters_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
deregisterSubClusters_ = null;
}
return deregisterSubClustersBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeregisterSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeregisterSubClusterResponseProto)
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeregisterSubClusterResponseProto>() {
@java.lang.Override
public DeregisterSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.DeregisterSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
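// ---------------------------------------------------------------------
// Usage sketch (illustrative): populating the repeated field through the
// builder. getDefaultInstance() stands in for a fully populated
// DeregisterSubClustersProto, whose fields are defined in YarnProtos.
//
//   DeregisterSubClusterResponseProto resp =
//       DeregisterSubClusterResponseProto.newBuilder()
//           .addDeregisterSubClusters(
//               org.apache.hadoop.yarn.proto.YarnProtos
//                   .DeregisterSubClustersProto.getDefaultInstance())
//           .build();
//   assert resp.getDeregisterSubClustersCount() == 1;
//
// Note that buildPartialRepeatedFields() wraps the list in
// Collections.unmodifiableList, so the list returned by
// getDeregisterSubClustersList() on a built message cannot be mutated.
// ---------------------------------------------------------------------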
public interface SaveFederationQueuePolicyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SaveFederationQueuePolicyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string queue = 1;
* @return Whether the queue field is set.
*/
boolean hasQueue();
/**
* required string queue = 1;
* @return The queue.
*/
java.lang.String getQueue();
/**
* required string queue = 1;
* @return The bytes for queue.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes();
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
* @return Whether the federationQueueWeight field is set.
*/
boolean hasFederationQueueWeight();
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
* @return The federationQueueWeight.
*/
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getFederationQueueWeight();
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder getFederationQueueWeightOrBuilder();
/**
* optional string policyManagerClassName = 3;
* @return Whether the policyManagerClassName field is set.
*/
boolean hasPolicyManagerClassName();
/**
* optional string policyManagerClassName = 3;
* @return The policyManagerClassName.
*/
java.lang.String getPolicyManagerClassName();
/**
* optional string policyManagerClassName = 3;
* @return The bytes for policyManagerClassName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getPolicyManagerClassNameBytes();
}
/**
* Protobuf type {@code hadoop.yarn.SaveFederationQueuePolicyRequestProto}
*/
public static final class SaveFederationQueuePolicyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SaveFederationQueuePolicyRequestProto)
SaveFederationQueuePolicyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SaveFederationQueuePolicyRequestProto.newBuilder() to construct.
private SaveFederationQueuePolicyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SaveFederationQueuePolicyRequestProto() {
queue_ = "";
policyManagerClassName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SaveFederationQueuePolicyRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto.Builder.class);
}
private int bitField0_;
public static final int QUEUE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object queue_ = "";
/**
* required string queue = 1;
* @return Whether the queue field is set.
*/
@java.lang.Override
public boolean hasQueue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string queue = 1;
* @return The queue.
*/
@java.lang.Override
public java.lang.String getQueue() {
java.lang.Object ref = queue_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
queue_ = s;
}
return s;
}
}
/**
* required string queue = 1;
* @return The bytes for queue.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes() {
java.lang.Object ref = queue_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
queue_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int FEDERATIONQUEUEWEIGHT_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto federationQueueWeight_;
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
* @return Whether the federationQueueWeight field is set.
*/
@java.lang.Override
public boolean hasFederationQueueWeight() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
* @return The federationQueueWeight.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getFederationQueueWeight() {
return federationQueueWeight_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance() : federationQueueWeight_;
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder getFederationQueueWeightOrBuilder() {
return federationQueueWeight_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance() : federationQueueWeight_;
}
public static final int POLICYMANAGERCLASSNAME_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object policyManagerClassName_ = "";
/**
* optional string policyManagerClassName = 3;
* @return Whether the policyManagerClassName field is set.
*/
@java.lang.Override
public boolean hasPolicyManagerClassName() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string policyManagerClassName = 3;
* @return The policyManagerClassName.
*/
@java.lang.Override
public java.lang.String getPolicyManagerClassName() {
java.lang.Object ref = policyManagerClassName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
policyManagerClassName_ = s;
}
return s;
}
}
/**
* optional string policyManagerClassName = 3;
* @return The bytes for policyManagerClassName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getPolicyManagerClassNameBytes() {
java.lang.Object ref = policyManagerClassName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
policyManagerClassName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasQueue()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasFederationQueueWeight()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
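// Note: queue (field 1) and federationQueueWeight (field 2) are declared
// `required` in the .proto definition, so isInitialized() reports false until
// both are set; the optional policyManagerClassName (field 3) is not checked.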
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queue_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getFederationQueueWeight());
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, policyManagerClassName_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queue_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getFederationQueueWeight());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, policyManagerClassName_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto) obj;
if (hasQueue() != other.hasQueue()) return false;
if (hasQueue()) {
if (!getQueue()
.equals(other.getQueue())) return false;
}
if (hasFederationQueueWeight() != other.hasFederationQueueWeight()) return false;
if (hasFederationQueueWeight()) {
if (!getFederationQueueWeight()
.equals(other.getFederationQueueWeight())) return false;
}
if (hasPolicyManagerClassName() != other.hasPolicyManagerClassName()) return false;
if (hasPolicyManagerClassName()) {
if (!getPolicyManagerClassName()
.equals(other.getPolicyManagerClassName())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasQueue()) {
hash = (37 * hash) + QUEUE_FIELD_NUMBER;
hash = (53 * hash) + getQueue().hashCode();
}
if (hasFederationQueueWeight()) {
hash = (37 * hash) + FEDERATIONQUEUEWEIGHT_FIELD_NUMBER;
hash = (53 * hash) + getFederationQueueWeight().hashCode();
}
if (hasPolicyManagerClassName()) {
hash = (37 * hash) + POLICYMANAGERCLASSNAME_FIELD_NUMBER;
hash = (53 * hash) + getPolicyManagerClassName().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SaveFederationQueuePolicyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SaveFederationQueuePolicyRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getFederationQueueWeightFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
queue_ = "";
federationQueueWeight_ = null;
if (federationQueueWeightBuilder_ != null) {
federationQueueWeightBuilder_.dispose();
federationQueueWeightBuilder_ = null;
}
policyManagerClassName_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.queue_ = queue_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.federationQueueWeight_ = federationQueueWeightBuilder_ == null
? federationQueueWeight_
: federationQueueWeightBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.policyManagerClassName_ = policyManagerClassName_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto.getDefaultInstance()) return this;
if (other.hasQueue()) {
queue_ = other.queue_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasFederationQueueWeight()) {
mergeFederationQueueWeight(other.getFederationQueueWeight());
}
if (other.hasPolicyManagerClassName()) {
policyManagerClassName_ = other.policyManagerClassName_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasQueue()) {
return false;
}
if (!hasFederationQueueWeight()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
queue_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getFederationQueueWeightFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
policyManagerClassName_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
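// Note on the case labels above: a protobuf tag is (field_number << 3) | wire_type,
// and wire type 2 marks a length-delimited value. So case 10 is field 1 (queue),
// case 18 is field 2 (federationQueueWeight), case 26 is field 3
// (policyManagerClassName), and tag 0 signals end of input.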
private int bitField0_;
private java.lang.Object queue_ = "";
/**
* required string queue = 1;
* @return Whether the queue field is set.
*/
public boolean hasQueue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string queue = 1;
* @return The queue.
*/
public java.lang.String getQueue() {
java.lang.Object ref = queue_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
queue_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string queue = 1;
* @return The bytes for queue.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes() {
java.lang.Object ref = queue_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
queue_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string queue = 1;
* @param value The queue to set.
* @return This builder for chaining.
*/
public Builder setQueue(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
queue_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string queue = 1;
* @return This builder for chaining.
*/
public Builder clearQueue() {
queue_ = getDefaultInstance().getQueue();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string queue = 1;
* @param value The bytes for queue to set.
* @return This builder for chaining.
*/
public Builder setQueueBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
queue_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto federationQueueWeight_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder> federationQueueWeightBuilder_;
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
* @return Whether the federationQueueWeight field is set.
*/
public boolean hasFederationQueueWeight() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
* @return The federationQueueWeight.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getFederationQueueWeight() {
if (federationQueueWeightBuilder_ == null) {
return federationQueueWeight_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance() : federationQueueWeight_;
} else {
return federationQueueWeightBuilder_.getMessage();
}
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
public Builder setFederationQueueWeight(org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto value) {
if (federationQueueWeightBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
federationQueueWeight_ = value;
} else {
federationQueueWeightBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
public Builder setFederationQueueWeight(
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder builderForValue) {
if (federationQueueWeightBuilder_ == null) {
federationQueueWeight_ = builderForValue.build();
} else {
federationQueueWeightBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
public Builder mergeFederationQueueWeight(org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto value) {
if (federationQueueWeightBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
federationQueueWeight_ != null &&
federationQueueWeight_ != org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance()) {
getFederationQueueWeightBuilder().mergeFrom(value);
} else {
federationQueueWeight_ = value;
}
} else {
federationQueueWeightBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
public Builder clearFederationQueueWeight() {
bitField0_ = (bitField0_ & ~0x00000002);
federationQueueWeight_ = null;
if (federationQueueWeightBuilder_ != null) {
federationQueueWeightBuilder_.dispose();
federationQueueWeightBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder getFederationQueueWeightBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getFederationQueueWeightFieldBuilder().getBuilder();
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder getFederationQueueWeightOrBuilder() {
if (federationQueueWeightBuilder_ != null) {
return federationQueueWeightBuilder_.getMessageOrBuilder();
} else {
return federationQueueWeight_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance() : federationQueueWeight_;
}
}
/**
* required .hadoop.yarn.FederationQueueWeightProto federationQueueWeight = 2;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder>
getFederationQueueWeightFieldBuilder() {
if (federationQueueWeightBuilder_ == null) {
federationQueueWeightBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder>(
getFederationQueueWeight(),
getParentForChildren(),
isClean());
federationQueueWeight_ = null;
}
return federationQueueWeightBuilder_;
}
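// Note: the SingleFieldBuilderV3 is created lazily here; once it exists it
// becomes the single source of truth for the nested message, which is why the
// plain federationQueueWeight_ reference is nulled out above.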
private java.lang.Object policyManagerClassName_ = "";
/**
* optional string policyManagerClassName = 3;
* @return Whether the policyManagerClassName field is set.
*/
public boolean hasPolicyManagerClassName() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string policyManagerClassName = 3;
* @return The policyManagerClassName.
*/
public java.lang.String getPolicyManagerClassName() {
java.lang.Object ref = policyManagerClassName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
policyManagerClassName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string policyManagerClassName = 3;
* @return The bytes for policyManagerClassName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getPolicyManagerClassNameBytes() {
java.lang.Object ref = policyManagerClassName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
policyManagerClassName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string policyManagerClassName = 3;
* @param value The policyManagerClassName to set.
* @return This builder for chaining.
*/
public Builder setPolicyManagerClassName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
policyManagerClassName_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string policyManagerClassName = 3;
* @return This builder for chaining.
*/
public Builder clearPolicyManagerClassName() {
policyManagerClassName_ = getDefaultInstance().getPolicyManagerClassName();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string policyManagerClassName = 3;
* @param value The bytes for policyManagerClassName to set.
* @return This builder for chaining.
*/
public Builder setPolicyManagerClassNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
policyManagerClassName_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SaveFederationQueuePolicyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SaveFederationQueuePolicyRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SaveFederationQueuePolicyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SaveFederationQueuePolicyRequestProto>() {
@java.lang.Override
public SaveFederationQueuePolicyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SaveFederationQueuePolicyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SaveFederationQueuePolicyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
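// Editor's sketch (not generated code): minimal round-trip usage of the request
// message above. The FederationQueueWeightProto setters live in YarnProtos and
// are not shown in this file; the empty builder below assumes that message has
// no required fields, and the policy-manager class name is illustrative only.
//
//   SaveFederationQueuePolicyRequestProto request =
//       SaveFederationQueuePolicyRequestProto.newBuilder()
//           .setQueue("root.default")                                 // required field 1
//           .setFederationQueueWeight(
//               org.apache.hadoop.yarn.proto.YarnProtos
//                   .FederationQueueWeightProto.newBuilder().build()) // required field 2
//           .setPolicyManagerClassName("org.example.MyPolicyManager") // optional field 3
//           .build();  // build() throws if a required field is missing
//   byte[] wire = request.toByteArray();
//   SaveFederationQueuePolicyRequestProto parsed =
//       SaveFederationQueuePolicyRequestProto.parseFrom(wire);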
public interface SaveFederationQueuePolicyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SaveFederationQueuePolicyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string message = 1;
* @return Whether the message field is set.
*/
boolean hasMessage();
/**
* required string message = 1;
* @return The message.
*/
java.lang.String getMessage();
/**
* required string message = 1;
* @return The bytes for message.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes();
}
/**
* Protobuf type {@code hadoop.yarn.SaveFederationQueuePolicyResponseProto}
*/
public static final class SaveFederationQueuePolicyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SaveFederationQueuePolicyResponseProto)
SaveFederationQueuePolicyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SaveFederationQueuePolicyResponseProto.newBuilder() to construct.
private SaveFederationQueuePolicyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SaveFederationQueuePolicyResponseProto() {
message_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SaveFederationQueuePolicyResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto.Builder.class);
}
private int bitField0_;
public static final int MESSAGE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object message_ = "";
/**
* required string message = 1;
* @return Whether the message field is set.
*/
@java.lang.Override
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
* @return The message.
*/
@java.lang.Override
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
* required string message = 1;
* @return The bytes for message.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasMessage()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto) obj;
if (hasMessage() != other.hasMessage()) return false;
if (hasMessage()) {
if (!getMessage()
.equals(other.getMessage())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMessage()) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SaveFederationQueuePolicyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SaveFederationQueuePolicyResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
message_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_SaveFederationQueuePolicyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.message_ = message_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto.getDefaultInstance()) return this;
if (other.hasMessage()) {
message_ = other.message_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasMessage()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
message_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object message_ = "";
/**
* required string message = 1;
* @return Whether the message field is set.
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
* @return The message.
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string message = 1;
* @return The bytes for message.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string message = 1;
* @param value The message to set.
* @return This builder for chaining.
*/
public Builder setMessage(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
message_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string message = 1;
* @return This builder for chaining.
*/
public Builder clearMessage() {
message_ = getDefaultInstance().getMessage();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string message = 1;
* @param value The bytes for message to set.
* @return This builder for chaining.
*/
public Builder setMessageBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
message_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SaveFederationQueuePolicyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SaveFederationQueuePolicyResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SaveFederationQueuePolicyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SaveFederationQueuePolicyResponseProto>() {
@java.lang.Override
public SaveFederationQueuePolicyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SaveFederationQueuePolicyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SaveFederationQueuePolicyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SaveFederationQueuePolicyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
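// Editor's sketch (not generated code): responses are commonly read back from a
// stream; parseDelimitedFrom(), declared above, returns null at end of stream.
//
//   java.io.InputStream in = ...; // e.g. a socket or file stream (placeholder)
//   SaveFederationQueuePolicyResponseProto response =
//       SaveFederationQueuePolicyResponseProto.parseDelimitedFrom(in);
//   if (response != null && response.hasMessage()) {
//     System.out.println(response.getMessage());
//   }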
public interface BatchSaveFederationQueuePoliciesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.BatchSaveFederationQueuePoliciesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto>
getFederationQueueWeightsList();
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getFederationQueueWeights(int index);
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
int getFederationQueueWeightsCount();
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder>
getFederationQueueWeightsOrBuilderList();
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder getFederationQueueWeightsOrBuilder(
int index);
}
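// Note: for this repeated message field the interface exposes the usual five
// accessors: the full list, an indexed getter, a count, and two OrBuilder
// variants that allow reading pending builder state without materializing
// messages.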
/**
* Protobuf type {@code hadoop.yarn.BatchSaveFederationQueuePoliciesRequestProto}
*/
public static final class BatchSaveFederationQueuePoliciesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.BatchSaveFederationQueuePoliciesRequestProto)
BatchSaveFederationQueuePoliciesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use BatchSaveFederationQueuePoliciesRequestProto.newBuilder() to construct.
private BatchSaveFederationQueuePoliciesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BatchSaveFederationQueuePoliciesRequestProto() {
federationQueueWeights_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BatchSaveFederationQueuePoliciesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto.Builder.class);
}
public static final int FEDERATIONQUEUEWEIGHTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto> federationQueueWeights_;
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto> getFederationQueueWeightsList() {
return federationQueueWeights_;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder>
getFederationQueueWeightsOrBuilderList() {
return federationQueueWeights_;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
@java.lang.Override
public int getFederationQueueWeightsCount() {
return federationQueueWeights_.size();
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getFederationQueueWeights(int index) {
return federationQueueWeights_.get(index);
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder getFederationQueueWeightsOrBuilder(
int index) {
return federationQueueWeights_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < federationQueueWeights_.size(); i++) {
output.writeMessage(1, federationQueueWeights_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < federationQueueWeights_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, federationQueueWeights_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto) obj;
if (!getFederationQueueWeightsList()
.equals(other.getFederationQueueWeightsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getFederationQueueWeightsCount() > 0) {
hash = (37 * hash) + FEDERATIONQUEUEWEIGHTS_FIELD_NUMBER;
hash = (53 * hash) + getFederationQueueWeightsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.BatchSaveFederationQueuePoliciesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.BatchSaveFederationQueuePoliciesRequestProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (federationQueueWeightsBuilder_ == null) {
federationQueueWeights_ = java.util.Collections.emptyList();
} else {
federationQueueWeights_ = null;
federationQueueWeightsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto result) {
if (federationQueueWeightsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
federationQueueWeights_ = java.util.Collections.unmodifiableList(federationQueueWeights_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.federationQueueWeights_ = federationQueueWeights_;
} else {
result.federationQueueWeights_ = federationQueueWeightsBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto.getDefaultInstance()) return this;
if (federationQueueWeightsBuilder_ == null) {
if (!other.federationQueueWeights_.isEmpty()) {
if (federationQueueWeights_.isEmpty()) {
federationQueueWeights_ = other.federationQueueWeights_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.addAll(other.federationQueueWeights_);
}
onChanged();
}
} else {
if (!other.federationQueueWeights_.isEmpty()) {
if (federationQueueWeightsBuilder_.isEmpty()) {
federationQueueWeightsBuilder_.dispose();
federationQueueWeightsBuilder_ = null;
federationQueueWeights_ = other.federationQueueWeights_;
bitField0_ = (bitField0_ & ~0x00000001);
federationQueueWeightsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getFederationQueueWeightsFieldBuilder() : null;
} else {
federationQueueWeightsBuilder_.addAllMessages(other.federationQueueWeights_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.PARSER,
extensionRegistry);
if (federationQueueWeightsBuilder_ == null) {
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.add(m);
} else {
federationQueueWeightsBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto> federationQueueWeights_ =
java.util.Collections.emptyList();
private void ensureFederationQueueWeightsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
federationQueueWeights_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto>(federationQueueWeights_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder> federationQueueWeightsBuilder_;
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto> getFederationQueueWeightsList() {
if (federationQueueWeightsBuilder_ == null) {
return java.util.Collections.unmodifiableList(federationQueueWeights_);
} else {
return federationQueueWeightsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public int getFederationQueueWeightsCount() {
if (federationQueueWeightsBuilder_ == null) {
return federationQueueWeights_.size();
} else {
return federationQueueWeightsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getFederationQueueWeights(int index) {
if (federationQueueWeightsBuilder_ == null) {
return federationQueueWeights_.get(index);
} else {
return federationQueueWeightsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder setFederationQueueWeights(
int index, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto value) {
if (federationQueueWeightsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.set(index, value);
onChanged();
} else {
federationQueueWeightsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder setFederationQueueWeights(
int index, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder builderForValue) {
if (federationQueueWeightsBuilder_ == null) {
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.set(index, builderForValue.build());
onChanged();
} else {
federationQueueWeightsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder addFederationQueueWeights(org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto value) {
if (federationQueueWeightsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.add(value);
onChanged();
} else {
federationQueueWeightsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder addFederationQueueWeights(
int index, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto value) {
if (federationQueueWeightsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.add(index, value);
onChanged();
} else {
federationQueueWeightsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder addFederationQueueWeights(
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder builderForValue) {
if (federationQueueWeightsBuilder_ == null) {
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.add(builderForValue.build());
onChanged();
} else {
federationQueueWeightsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder addFederationQueueWeights(
int index, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder builderForValue) {
if (federationQueueWeightsBuilder_ == null) {
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.add(index, builderForValue.build());
onChanged();
} else {
federationQueueWeightsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder addAllFederationQueueWeights(
java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto> values) {
if (federationQueueWeightsBuilder_ == null) {
ensureFederationQueueWeightsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, federationQueueWeights_);
onChanged();
} else {
federationQueueWeightsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder clearFederationQueueWeights() {
if (federationQueueWeightsBuilder_ == null) {
federationQueueWeights_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
federationQueueWeightsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public Builder removeFederationQueueWeights(int index) {
if (federationQueueWeightsBuilder_ == null) {
ensureFederationQueueWeightsIsMutable();
federationQueueWeights_.remove(index);
onChanged();
} else {
federationQueueWeightsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder getFederationQueueWeightsBuilder(
int index) {
return getFederationQueueWeightsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder getFederationQueueWeightsOrBuilder(
int index) {
if (federationQueueWeightsBuilder_ == null) {
return federationQueueWeights_.get(index); } else {
return federationQueueWeightsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder>
getFederationQueueWeightsOrBuilderList() {
if (federationQueueWeightsBuilder_ != null) {
return federationQueueWeightsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(federationQueueWeights_);
}
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder addFederationQueueWeightsBuilder() {
return getFederationQueueWeightsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder addFederationQueueWeightsBuilder(
int index) {
return getFederationQueueWeightsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.FederationQueueWeightProto federationQueueWeights = 1;
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder>
getFederationQueueWeightsBuilderList() {
return getFederationQueueWeightsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder>
getFederationQueueWeightsFieldBuilder() {
if (federationQueueWeightsBuilder_ == null) {
federationQueueWeightsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder>(
federationQueueWeights_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
federationQueueWeights_ = null;
}
return federationQueueWeightsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.BatchSaveFederationQueuePoliciesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.BatchSaveFederationQueuePoliciesRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<BatchSaveFederationQueuePoliciesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<BatchSaveFederationQueuePoliciesRequestProto>() {
@java.lang.Override
public BatchSaveFederationQueuePoliciesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<BatchSaveFederationQueuePoliciesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<BatchSaveFederationQueuePoliciesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
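/*
 * Illustrative sketch (not part of the generated file): building the
 * request and round-tripping it through the wire format using the
 * newBuilder()/build()/parseFrom() methods defined above. The
 * FederationQueueWeightProto builder calls are assumed to follow the same
 * generated pattern as this class; toByteArray() comes from the protobuf
 * base message class.
 *
 *   org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos
 *       .BatchSaveFederationQueuePoliciesRequestProto request =
 *       org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos
 *           .BatchSaveFederationQueuePoliciesRequestProto.newBuilder()
 *           .addFederationQueueWeights(
 *               org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto
 *                   .newBuilder().build())
 *           .build();
 *   byte[] wire = request.toByteArray();  // serialize to the protobuf wire format
 *   org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos
 *       .BatchSaveFederationQueuePoliciesRequestProto parsed =
 *       org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos
 *           .BatchSaveFederationQueuePoliciesRequestProto.parseFrom(wire);
 */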
public interface BatchSaveFederationQueuePoliciesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.BatchSaveFederationQueuePoliciesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string message = 1;
* @return Whether the message field is set.
*/
boolean hasMessage();
/**
* required string message = 1;
* @return The message.
*/
java.lang.String getMessage();
/**
* required string message = 1;
* @return The bytes for message.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes();
}
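/*
 * Illustrative sketch (not part of the generated file): "message" is a
 * required field, so well-formed responses always carry it, but a
 * defensive caller can still guard the read. "resp" is a hypothetical
 * instance of the response type declared below:
 *
 *   java.lang.String msg = resp.hasMessage() ? resp.getMessage() : "";
 */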
/**
* Protobuf type {@code hadoop.yarn.BatchSaveFederationQueuePoliciesResponseProto}
*/
public static final class BatchSaveFederationQueuePoliciesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.BatchSaveFederationQueuePoliciesResponseProto)
BatchSaveFederationQueuePoliciesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use BatchSaveFederationQueuePoliciesResponseProto.newBuilder() to construct.
private BatchSaveFederationQueuePoliciesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BatchSaveFederationQueuePoliciesResponseProto() {
message_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BatchSaveFederationQueuePoliciesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto.Builder.class);
}
private int bitField0_;
public static final int MESSAGE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object message_ = "";
/**
* required string message = 1;
* @return Whether the message field is set.
*/
@java.lang.Override
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
* @return The message.
*/
@java.lang.Override
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
* required string message = 1;
* @return The bytes for message.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasMessage()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto) obj;
if (hasMessage() != other.hasMessage()) return false;
if (hasMessage()) {
if (!getMessage()
.equals(other.getMessage())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMessage()) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.BatchSaveFederationQueuePoliciesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.BatchSaveFederationQueuePoliciesResponseProto)
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
message_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.internal_static_hadoop_yarn_BatchSaveFederationQueuePoliciesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.message_ = message_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto.getDefaultInstance()) return this;
if (other.hasMessage()) {
message_ = other.message_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasMessage()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
message_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object message_ = "";
/**
* required string message = 1;
* @return Whether the message field is set.
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string message = 1;
* @return The message.
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string message = 1;
* @return The bytes for message.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string message = 1;
* @param value The message to set.
* @return This builder for chaining.
*/
public Builder setMessage(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
message_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string message = 1;
* @return This builder for chaining.
*/
public Builder clearMessage() {
message_ = getDefaultInstance().getMessage();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string message = 1;
* @param value The bytes for message to set.
* @return This builder for chaining.
*/
public Builder setMessageBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
message_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.BatchSaveFederationQueuePoliciesResponseProto)
}
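/*
 * Illustrative sketch (not part of the generated file): because "message"
 * is a required field, build() throws if it was never set, while
 * buildPartial() would return the incomplete message. The variable names
 * and the message text below are hypothetical:
 *
 *   BatchSaveFederationQueuePoliciesResponseProto.Builder b =
 *       BatchSaveFederationQueuePoliciesResponseProto.newBuilder();
 *   boolean ok = b.isInitialized();      // false: required field still unset
 *   b.setMessage("saved 3 policies");    // hypothetical status text
 *   BatchSaveFederationQueuePoliciesResponseProto resp = b.build(); // now succeeds
 */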
// @@protoc_insertion_point(class_scope:hadoop.yarn.BatchSaveFederationQueuePoliciesResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.BatchSaveFederationQueuePoliciesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<BatchSaveFederationQueuePoliciesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<BatchSaveFederationQueuePoliciesResponseProto>() {
@java.lang.Override
public BatchSaveFederationQueuePoliciesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<BatchSaveFederationQueuePoliciesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<BatchSaveFederationQueuePoliciesResponseProto>