// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: yarn_server_federation_protos.proto
package org.apache.hadoop.yarn.federation.proto;
public final class YarnServerFederationProtos {
private YarnServerFederationProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
/**
* Protobuf enum {@code hadoop.yarn.SubClusterStateProto}
*/
public enum SubClusterStateProto
implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
* SC_NEW = 1;
*/
SC_NEW(1),
/**
* SC_RUNNING = 2;
*/
SC_RUNNING(2),
/**
* SC_UNHEALTHY = 3;
*/
SC_UNHEALTHY(3),
/**
* SC_DECOMMISSIONING = 4;
*/
SC_DECOMMISSIONING(4),
/**
* SC_LOST = 5;
*/
SC_LOST(5),
/**
* SC_UNREGISTERED = 6;
*/
SC_UNREGISTERED(6),
/**
* SC_DECOMMISSIONED = 7;
*/
SC_DECOMMISSIONED(7),
;
/**
* SC_NEW = 1;
*/
public static final int SC_NEW_VALUE = 1;
/**
* SC_RUNNING = 2;
*/
public static final int SC_RUNNING_VALUE = 2;
/**
* SC_UNHEALTHY = 3;
*/
public static final int SC_UNHEALTHY_VALUE = 3;
/**
* SC_DECOMMISSIONING = 4;
*/
public static final int SC_DECOMMISSIONING_VALUE = 4;
/**
* SC_LOST = 5;
*/
public static final int SC_LOST_VALUE = 5;
/**
* SC_UNREGISTERED = 6;
*/
public static final int SC_UNREGISTERED_VALUE = 6;
/**
* SC_DECOMMISSIONED = 7;
*/
public static final int SC_DECOMMISSIONED_VALUE = 7;
public final int getNumber() {
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static SubClusterStateProto valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static SubClusterStateProto forNumber(int value) {
switch (value) {
case 1: return SC_NEW;
case 2: return SC_RUNNING;
case 3: return SC_UNHEALTHY;
case 4: return SC_DECOMMISSIONING;
case 5: return SC_LOST;
case 6: return SC_UNREGISTERED;
case 7: return SC_DECOMMISSIONED;
default: return null;
}
}
public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SubClusterStateProto>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
SubClusterStateProto> internalValueMap =
new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SubClusterStateProto>() {
public SubClusterStateProto findValueByNumber(int number) {
return SubClusterStateProto.forNumber(number);
}
};
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.getDescriptor().getEnumTypes().get(0);
}
private static final SubClusterStateProto[] VALUES = values();
public static SubClusterStateProto valueOf(
org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private SubClusterStateProto(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.yarn.SubClusterStateProto)
}
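// --- Illustrative usage sketch (not part of the generated file; the method name is hypothetical). ---
// SubClusterStateProto maps between the enum constants above and their numeric wire values:
// forNumber(int) resolves a wire value (returning null for unknown numbers), and getNumber()
// goes the other way.
private static void subClusterStateExample() {
SubClusterStateProto running = SubClusterStateProto.forNumber(2);   // SC_RUNNING
int wireValue = SubClusterStateProto.SC_LOST.getNumber();           // 5
SubClusterStateProto unknown = SubClusterStateProto.forNumber(99);  // null: no such wire value
assert running == SubClusterStateProto.SC_RUNNING && wireValue == 5 && unknown == null;
}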
public interface SubClusterIdProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterIdProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string id = 1;
* @return Whether the id field is set.
*/
boolean hasId();
/**
* optional string id = 1;
* @return The id.
*/
java.lang.String getId();
/**
* optional string id = 1;
* @return The bytes for id.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getIdBytes();
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterIdProto}
*/
public static final class SubClusterIdProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterIdProto)
SubClusterIdProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterIdProto.newBuilder() to construct.
private SubClusterIdProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterIdProto() {
id_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterIdProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterIdProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterIdProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder.class);
}
private int bitField0_;
public static final int ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object id_ = "";
/**
* optional string id = 1;
* @return Whether the id field is set.
*/
@java.lang.Override
public boolean hasId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string id = 1;
* @return The id.
*/
@java.lang.Override
public java.lang.String getId() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
id_ = s;
}
return s;
}
}
/**
* optional string id = 1;
* @return The bytes for id.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
id_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto) obj;
if (hasId() != other.hasId()) return false;
if (hasId()) {
if (!getId()
.equals(other.getId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasId()) {
hash = (37 * hash) + ID_FIELD_NUMBER;
hash = (53 * hash) + getId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
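// --- Illustrative usage sketch (not part of the generated file; the method name and stream
// parameters are hypothetical). ---
// parseFrom(InputStream) reads the remainder of a stream as one message, whereas
// writeDelimitedTo/parseDelimitedFrom length-prefix each message so several messages can
// share a single stream.
private static void delimitedStreamExample(java.io.OutputStream out, java.io.InputStream in)
throws java.io.IOException {
newBuilder().setId("SC-1").build().writeDelimitedTo(out);
newBuilder().setId("SC-2").build().writeDelimitedTo(out);
SubClusterIdProto first = parseDelimitedFrom(in);   // reads the "SC-1" message
SubClusterIdProto second = parseDelimitedFrom(in);  // reads the "SC-2" message
assert first.getId().equals("SC-1") && second.getId().equals("SC-2");
}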
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterIdProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterIdProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterIdProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterIdProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
id_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterIdProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.id_ = id_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) return this;
if (other.hasId()) {
id_ = other.id_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
id_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object id_ = "";
/**
* optional string id = 1;
* @return Whether the id field is set.
*/
public boolean hasId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string id = 1;
* @return The id.
*/
public java.lang.String getId() {
java.lang.Object ref = id_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
id_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string id = 1;
* @return The bytes for id.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
id_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string id = 1;
* @param value The id to set.
* @return This builder for chaining.
*/
public Builder setId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string id = 1;
* @return This builder for chaining.
*/
public Builder clearId() {
id_ = getDefaultInstance().getId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string id = 1;
* @param value The bytes for id to set.
* @return This builder for chaining.
*/
public Builder setIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterIdProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterIdProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterIdProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterIdProto>() {
@java.lang.Override
public SubClusterIdProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterIdProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterIdProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
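// --- Illustrative usage sketch (not part of the generated file; the method name is hypothetical). ---
// A round trip through the SubClusterIdProto API defined above: build with the Builder,
// serialize to a byte[], and parse it back.
private static void subClusterIdRoundTripExample()
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
SubClusterIdProto original = SubClusterIdProto.newBuilder()
.setId("SC-1")
.build();
byte[] bytes = original.toByteArray();  // serialize (inherited from AbstractMessageLite)
SubClusterIdProto parsed = SubClusterIdProto.parseFrom(bytes);
assert parsed.hasId() && parsed.getId().equals("SC-1");
}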
public interface SubClusterInfoProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterInfoProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder();
/**
* optional string aMRM_service_address = 2;
* @return Whether the aMRMServiceAddress field is set.
*/
boolean hasAMRMServiceAddress();
/**
* optional string aMRM_service_address = 2;
* @return The aMRMServiceAddress.
*/
java.lang.String getAMRMServiceAddress();
/**
* optional string aMRM_service_address = 2;
* @return The bytes for aMRMServiceAddress.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getAMRMServiceAddressBytes();
/**
* optional string client_rM_service_address = 3;
* @return Whether the clientRMServiceAddress field is set.
*/
boolean hasClientRMServiceAddress();
/**
* optional string client_rM_service_address = 3;
* @return The clientRMServiceAddress.
*/
java.lang.String getClientRMServiceAddress();
/**
* optional string client_rM_service_address = 3;
* @return The bytes for clientRMServiceAddress.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getClientRMServiceAddressBytes();
/**
* optional string rM_admin_service_address = 4;
* @return Whether the rMAdminServiceAddress field is set.
*/
boolean hasRMAdminServiceAddress();
/**
* optional string rM_admin_service_address = 4;
* @return The rMAdminServiceAddress.
*/
java.lang.String getRMAdminServiceAddress();
/**
* optional string rM_admin_service_address = 4;
* @return The bytes for rMAdminServiceAddress.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getRMAdminServiceAddressBytes();
/**
* optional string rM_web_service_address = 5;
* @return Whether the rMWebServiceAddress field is set.
*/
boolean hasRMWebServiceAddress();
/**
* optional string rM_web_service_address = 5;
* @return The rMWebServiceAddress.
*/
java.lang.String getRMWebServiceAddress();
/**
* optional string rM_web_service_address = 5;
* @return The bytes for rMWebServiceAddress.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getRMWebServiceAddressBytes();
/**
* optional int64 lastHeartBeat = 6;
* @return Whether the lastHeartBeat field is set.
*/
boolean hasLastHeartBeat();
/**
* optional int64 lastHeartBeat = 6;
* @return The lastHeartBeat.
*/
long getLastHeartBeat();
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @return Whether the state field is set.
*/
boolean hasState();
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @return The state.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState();
/**
* optional int64 lastStartTime = 8;
* @return Whether the lastStartTime field is set.
*/
boolean hasLastStartTime();
/**
* optional int64 lastStartTime = 8;
* @return The lastStartTime.
*/
long getLastStartTime();
/**
* optional string capability = 9;
* @return Whether the capability field is set.
*/
boolean hasCapability();
/**
* optional string capability = 9;
* @return The capability.
*/
java.lang.String getCapability();
/**
* optional string capability = 9;
* @return The bytes for capability.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getCapabilityBytes();
}
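// --- Illustrative usage sketch (not part of the generated file; the method name is hypothetical). ---
// SubClusterInfoProtoOrBuilder is implemented by both the immutable SubClusterInfoProto message
// below and its Builder, so read-only code can accept either form through this interface.
private static java.lang.String describeSubCluster(SubClusterInfoProtoOrBuilder info) {
java.lang.String id = info.hasSubClusterId() ? info.getSubClusterId().getId() : "<unset>";
return id + " state=" + info.getState() + " lastHeartBeat=" + info.getLastHeartBeat();
}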
/**
* Protobuf type {@code hadoop.yarn.SubClusterInfoProto}
*/
public static final class SubClusterInfoProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterInfoProto)
SubClusterInfoProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterInfoProto.newBuilder() to construct.
private SubClusterInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterInfoProto() {
aMRMServiceAddress_ = "";
clientRMServiceAddress_ = "";
rMAdminServiceAddress_ = "";
rMWebServiceAddress_ = "";
state_ = 1;
capability_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterInfoProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterInfoProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
public static final int AMRM_SERVICE_ADDRESS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object aMRMServiceAddress_ = "";
/**
* optional string aMRM_service_address = 2;
* @return Whether the aMRMServiceAddress field is set.
*/
@java.lang.Override
public boolean hasAMRMServiceAddress() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string aMRM_service_address = 2;
* @return The aMRMServiceAddress.
*/
@java.lang.Override
public java.lang.String getAMRMServiceAddress() {
java.lang.Object ref = aMRMServiceAddress_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
aMRMServiceAddress_ = s;
}
return s;
}
}
/**
* optional string aMRM_service_address = 2;
* @return The bytes for aMRMServiceAddress.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAMRMServiceAddressBytes() {
java.lang.Object ref = aMRMServiceAddress_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
aMRMServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int CLIENT_RM_SERVICE_ADDRESS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object clientRMServiceAddress_ = "";
/**
* optional string client_rM_service_address = 3;
* @return Whether the clientRMServiceAddress field is set.
*/
@java.lang.Override
public boolean hasClientRMServiceAddress() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string client_rM_service_address = 3;
* @return The clientRMServiceAddress.
*/
@java.lang.Override
public java.lang.String getClientRMServiceAddress() {
java.lang.Object ref = clientRMServiceAddress_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
clientRMServiceAddress_ = s;
}
return s;
}
}
/**
* optional string client_rM_service_address = 3;
* @return The bytes for clientRMServiceAddress.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getClientRMServiceAddressBytes() {
java.lang.Object ref = clientRMServiceAddress_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clientRMServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int RM_ADMIN_SERVICE_ADDRESS_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object rMAdminServiceAddress_ = "";
/**
* optional string rM_admin_service_address = 4;
* @return Whether the rMAdminServiceAddress field is set.
*/
@java.lang.Override
public boolean hasRMAdminServiceAddress() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string rM_admin_service_address = 4;
* @return The rMAdminServiceAddress.
*/
@java.lang.Override
public java.lang.String getRMAdminServiceAddress() {
java.lang.Object ref = rMAdminServiceAddress_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rMAdminServiceAddress_ = s;
}
return s;
}
}
/**
* optional string rM_admin_service_address = 4;
* @return The bytes for rMAdminServiceAddress.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRMAdminServiceAddressBytes() {
java.lang.Object ref = rMAdminServiceAddress_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rMAdminServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int RM_WEB_SERVICE_ADDRESS_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object rMWebServiceAddress_ = "";
/**
* optional string rM_web_service_address = 5;
* @return Whether the rMWebServiceAddress field is set.
*/
@java.lang.Override
public boolean hasRMWebServiceAddress() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional string rM_web_service_address = 5;
* @return The rMWebServiceAddress.
*/
@java.lang.Override
public java.lang.String getRMWebServiceAddress() {
java.lang.Object ref = rMWebServiceAddress_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rMWebServiceAddress_ = s;
}
return s;
}
}
/**
* optional string rM_web_service_address = 5;
* @return The bytes for rMWebServiceAddress.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRMWebServiceAddressBytes() {
java.lang.Object ref = rMWebServiceAddress_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rMWebServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int LASTHEARTBEAT_FIELD_NUMBER = 6;
private long lastHeartBeat_ = 0L;
/**
* optional int64 lastHeartBeat = 6;
* @return Whether the lastHeartBeat field is set.
*/
@java.lang.Override
public boolean hasLastHeartBeat() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional int64 lastHeartBeat = 6;
* @return The lastHeartBeat.
*/
@java.lang.Override
public long getLastHeartBeat() {
return lastHeartBeat_;
}
public static final int STATE_FIELD_NUMBER = 7;
private int state_ = 1;
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @return The state.
*/
@java.lang.Override public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto result = org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(state_);
return result == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.SC_NEW : result;
}
public static final int LASTSTARTTIME_FIELD_NUMBER = 8;
private long lastStartTime_ = 0L;
/**
* optional int64 lastStartTime = 8;
* @return Whether the lastStartTime field is set.
*/
@java.lang.Override
public boolean hasLastStartTime() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional int64 lastStartTime = 8;
* @return The lastStartTime.
*/
@java.lang.Override
public long getLastStartTime() {
return lastStartTime_;
}
public static final int CAPABILITY_FIELD_NUMBER = 9;
@SuppressWarnings("serial")
private volatile java.lang.Object capability_ = "";
/**
* optional string capability = 9;
* @return Whether the capability field is set.
*/
@java.lang.Override
public boolean hasCapability() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* optional string capability = 9;
* @return The capability.
*/
@java.lang.Override
public java.lang.String getCapability() {
java.lang.Object ref = capability_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
capability_ = s;
}
return s;
}
}
/**
* optional string capability = 9;
* @return The bytes for capability.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCapabilityBytes() {
java.lang.Object ref = capability_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
capability_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSubClusterId());
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, aMRMServiceAddress_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, clientRMServiceAddress_);
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, rMAdminServiceAddress_);
}
if (((bitField0_ & 0x00000010) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, rMWebServiceAddress_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeInt64(6, lastHeartBeat_);
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeEnum(7, state_);
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeInt64(8, lastStartTime_);
}
if (((bitField0_ & 0x00000100) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 9, capability_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getSubClusterId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, aMRMServiceAddress_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, clientRMServiceAddress_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, rMAdminServiceAddress_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, rMWebServiceAddress_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(6, lastHeartBeat_);
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(7, state_);
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(8, lastStartTime_);
}
if (((bitField0_ & 0x00000100) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(9, capability_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (hasAMRMServiceAddress() != other.hasAMRMServiceAddress()) return false;
if (hasAMRMServiceAddress()) {
if (!getAMRMServiceAddress()
.equals(other.getAMRMServiceAddress())) return false;
}
if (hasClientRMServiceAddress() != other.hasClientRMServiceAddress()) return false;
if (hasClientRMServiceAddress()) {
if (!getClientRMServiceAddress()
.equals(other.getClientRMServiceAddress())) return false;
}
if (hasRMAdminServiceAddress() != other.hasRMAdminServiceAddress()) return false;
if (hasRMAdminServiceAddress()) {
if (!getRMAdminServiceAddress()
.equals(other.getRMAdminServiceAddress())) return false;
}
if (hasRMWebServiceAddress() != other.hasRMWebServiceAddress()) return false;
if (hasRMWebServiceAddress()) {
if (!getRMWebServiceAddress()
.equals(other.getRMWebServiceAddress())) return false;
}
if (hasLastHeartBeat() != other.hasLastHeartBeat()) return false;
if (hasLastHeartBeat()) {
if (getLastHeartBeat()
!= other.getLastHeartBeat()) return false;
}
if (hasState() != other.hasState()) return false;
if (hasState()) {
if (state_ != other.state_) return false;
}
if (hasLastStartTime() != other.hasLastStartTime()) return false;
if (hasLastStartTime()) {
if (getLastStartTime()
!= other.getLastStartTime()) return false;
}
if (hasCapability() != other.hasCapability()) return false;
if (hasCapability()) {
if (!getCapability()
.equals(other.getCapability())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
if (hasAMRMServiceAddress()) {
hash = (37 * hash) + AMRM_SERVICE_ADDRESS_FIELD_NUMBER;
hash = (53 * hash) + getAMRMServiceAddress().hashCode();
}
if (hasClientRMServiceAddress()) {
hash = (37 * hash) + CLIENT_RM_SERVICE_ADDRESS_FIELD_NUMBER;
hash = (53 * hash) + getClientRMServiceAddress().hashCode();
}
if (hasRMAdminServiceAddress()) {
hash = (37 * hash) + RM_ADMIN_SERVICE_ADDRESS_FIELD_NUMBER;
hash = (53 * hash) + getRMAdminServiceAddress().hashCode();
}
if (hasRMWebServiceAddress()) {
hash = (37 * hash) + RM_WEB_SERVICE_ADDRESS_FIELD_NUMBER;
hash = (53 * hash) + getRMWebServiceAddress().hashCode();
}
if (hasLastHeartBeat()) {
hash = (37 * hash) + LASTHEARTBEAT_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getLastHeartBeat());
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + state_;
}
if (hasLastStartTime()) {
hash = (37 * hash) + LASTSTARTTIME_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getLastStartTime());
}
if (hasCapability()) {
hash = (37 * hash) + CAPABILITY_FIELD_NUMBER;
hash = (53 * hash) + getCapability().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterInfoProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterInfoProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterInfoProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSubClusterIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
aMRMServiceAddress_ = "";
clientRMServiceAddress_ = "";
rMAdminServiceAddress_ = "";
rMWebServiceAddress_ = "";
lastHeartBeat_ = 0L;
state_ = 1;
lastStartTime_ = 0L;
capability_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterInfoProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterIdBuilder_ == null
? subClusterId_
: subClusterIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.aMRMServiceAddress_ = aMRMServiceAddress_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.clientRMServiceAddress_ = clientRMServiceAddress_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.rMAdminServiceAddress_ = rMAdminServiceAddress_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.rMWebServiceAddress_ = rMWebServiceAddress_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.lastHeartBeat_ = lastHeartBeat_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.state_ = state_;
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.lastStartTime_ = lastStartTime_;
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00000100) != 0)) {
result.capability_ = capability_;
to_bitField0_ |= 0x00000100;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
mergeSubClusterId(other.getSubClusterId());
}
if (other.hasAMRMServiceAddress()) {
aMRMServiceAddress_ = other.aMRMServiceAddress_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasClientRMServiceAddress()) {
clientRMServiceAddress_ = other.clientRMServiceAddress_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasRMAdminServiceAddress()) {
rMAdminServiceAddress_ = other.rMAdminServiceAddress_;
bitField0_ |= 0x00000008;
onChanged();
}
if (other.hasRMWebServiceAddress()) {
rMWebServiceAddress_ = other.rMWebServiceAddress_;
bitField0_ |= 0x00000010;
onChanged();
}
if (other.hasLastHeartBeat()) {
setLastHeartBeat(other.getLastHeartBeat());
}
if (other.hasState()) {
setState(other.getState());
}
if (other.hasLastStartTime()) {
setLastStartTime(other.getLastStartTime());
}
if (other.hasCapability()) {
capability_ = other.capability_;
bitField0_ |= 0x00000100;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getSubClusterIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
aMRMServiceAddress_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
clientRMServiceAddress_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
rMAdminServiceAddress_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42: {
rMWebServiceAddress_ = input.readBytes();
bitField0_ |= 0x00000010;
break;
} // case 42
case 48: {
lastHeartBeat_ = input.readInt64();
bitField0_ |= 0x00000020;
break;
} // case 48
case 56: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto tmpValue =
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(7, tmpRaw);
} else {
state_ = tmpRaw;
bitField0_ |= 0x00000040;
}
break;
} // case 56
case 64: {
lastStartTime_ = input.readInt64();
bitField0_ |= 0x00000080;
break;
} // case 64
case 74: {
capability_ = input.readBytes();
bitField0_ |= 0x00000100;
break;
} // case 74
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> subClusterIdBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
if (subClusterIdBuilder_ == null) {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
} else {
return subClusterIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subClusterId_ = value;
} else {
subClusterIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (subClusterIdBuilder_ == null) {
subClusterId_ = builderForValue.build();
} else {
subClusterIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder mergeSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
subClusterId_ != null &&
subClusterId_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getSubClusterIdBuilder().mergeFrom(value);
} else {
subClusterId_ = value;
}
} else {
subClusterIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder clearSubClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getSubClusterIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSubClusterIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
if (subClusterIdBuilder_ != null) {
return subClusterIdBuilder_.getMessageOrBuilder();
} else {
return subClusterId_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getSubClusterIdFieldBuilder() {
if (subClusterIdBuilder_ == null) {
subClusterIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getSubClusterId(),
getParentForChildren(),
isClean());
subClusterId_ = null;
}
return subClusterIdBuilder_;
}
private java.lang.Object aMRMServiceAddress_ = "";
/**
* optional string aMRM_service_address = 2;
* @return Whether the aMRMServiceAddress field is set.
*/
public boolean hasAMRMServiceAddress() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string aMRM_service_address = 2;
* @return The aMRMServiceAddress.
*/
public java.lang.String getAMRMServiceAddress() {
java.lang.Object ref = aMRMServiceAddress_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
aMRMServiceAddress_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string aMRM_service_address = 2;
* @return The bytes for aMRMServiceAddress.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAMRMServiceAddressBytes() {
java.lang.Object ref = aMRMServiceAddress_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
aMRMServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string aMRM_service_address = 2;
* @param value The aMRMServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setAMRMServiceAddress(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
aMRMServiceAddress_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string aMRM_service_address = 2;
* @return This builder for chaining.
*/
public Builder clearAMRMServiceAddress() {
aMRMServiceAddress_ = getDefaultInstance().getAMRMServiceAddress();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string aMRM_service_address = 2;
* @param value The bytes for aMRMServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setAMRMServiceAddressBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
aMRMServiceAddress_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object clientRMServiceAddress_ = "";
/**
* optional string client_rM_service_address = 3;
* @return Whether the clientRMServiceAddress field is set.
*/
public boolean hasClientRMServiceAddress() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string client_rM_service_address = 3;
* @return The clientRMServiceAddress.
*/
public java.lang.String getClientRMServiceAddress() {
java.lang.Object ref = clientRMServiceAddress_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
clientRMServiceAddress_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string client_rM_service_address = 3;
* @return The bytes for clientRMServiceAddress.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getClientRMServiceAddressBytes() {
java.lang.Object ref = clientRMServiceAddress_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clientRMServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string client_rM_service_address = 3;
* @param value The clientRMServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setClientRMServiceAddress(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
clientRMServiceAddress_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string client_rM_service_address = 3;
* @return This builder for chaining.
*/
public Builder clearClientRMServiceAddress() {
clientRMServiceAddress_ = getDefaultInstance().getClientRMServiceAddress();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string client_rM_service_address = 3;
* @param value The bytes for clientRMServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setClientRMServiceAddressBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
clientRMServiceAddress_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object rMAdminServiceAddress_ = "";
/**
* optional string rM_admin_service_address = 4;
* @return Whether the rMAdminServiceAddress field is set.
*/
public boolean hasRMAdminServiceAddress() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string rM_admin_service_address = 4;
* @return The rMAdminServiceAddress.
*/
public java.lang.String getRMAdminServiceAddress() {
java.lang.Object ref = rMAdminServiceAddress_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rMAdminServiceAddress_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string rM_admin_service_address = 4;
* @return The bytes for rMAdminServiceAddress.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRMAdminServiceAddressBytes() {
java.lang.Object ref = rMAdminServiceAddress_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rMAdminServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string rM_admin_service_address = 4;
* @param value The rMAdminServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setRMAdminServiceAddress(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
rMAdminServiceAddress_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional string rM_admin_service_address = 4;
* @return This builder for chaining.
*/
public Builder clearRMAdminServiceAddress() {
rMAdminServiceAddress_ = getDefaultInstance().getRMAdminServiceAddress();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
* optional string rM_admin_service_address = 4;
* @param value The bytes for rMAdminServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setRMAdminServiceAddressBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
rMAdminServiceAddress_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object rMWebServiceAddress_ = "";
/**
* optional string rM_web_service_address = 5;
* @return Whether the rMWebServiceAddress field is set.
*/
public boolean hasRMWebServiceAddress() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional string rM_web_service_address = 5;
* @return The rMWebServiceAddress.
*/
public java.lang.String getRMWebServiceAddress() {
java.lang.Object ref = rMWebServiceAddress_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rMWebServiceAddress_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string rM_web_service_address = 5;
* @return The bytes for rMWebServiceAddress.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRMWebServiceAddressBytes() {
java.lang.Object ref = rMWebServiceAddress_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rMWebServiceAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string rM_web_service_address = 5;
* @param value The rMWebServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setRMWebServiceAddress(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
rMWebServiceAddress_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* optional string rM_web_service_address = 5;
* @return This builder for chaining.
*/
public Builder clearRMWebServiceAddress() {
rMWebServiceAddress_ = getDefaultInstance().getRMWebServiceAddress();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
* optional string rM_web_service_address = 5;
* @param value The bytes for rMWebServiceAddress to set.
* @return This builder for chaining.
*/
public Builder setRMWebServiceAddressBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
rMWebServiceAddress_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
private long lastHeartBeat_ ;
/**
* optional int64 lastHeartBeat = 6;
* @return Whether the lastHeartBeat field is set.
*/
@java.lang.Override
public boolean hasLastHeartBeat() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional int64 lastHeartBeat = 6;
* @return The lastHeartBeat.
*/
@java.lang.Override
public long getLastHeartBeat() {
return lastHeartBeat_;
}
/**
* optional int64 lastHeartBeat = 6;
* @param value The lastHeartBeat to set.
* @return This builder for chaining.
*/
public Builder setLastHeartBeat(long value) {
lastHeartBeat_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
* optional int64 lastHeartBeat = 6;
* @return This builder for chaining.
*/
public Builder clearLastHeartBeat() {
bitField0_ = (bitField0_ & ~0x00000020);
lastHeartBeat_ = 0L;
onChanged();
return this;
}
private int state_ = 1;
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @return The state.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto result = org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(state_);
return result == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.SC_NEW : result;
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @param value The state to set.
* @return This builder for chaining.
*/
public Builder setState(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000040;
state_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 7;
* @return This builder for chaining.
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000040);
state_ = 1;
onChanged();
return this;
}
private long lastStartTime_ ;
/**
* optional int64 lastStartTime = 8;
* @return Whether the lastStartTime field is set.
*/
@java.lang.Override
public boolean hasLastStartTime() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional int64 lastStartTime = 8;
* @return The lastStartTime.
*/
@java.lang.Override
public long getLastStartTime() {
return lastStartTime_;
}
/**
* optional int64 lastStartTime = 8;
* @param value The lastStartTime to set.
* @return This builder for chaining.
*/
public Builder setLastStartTime(long value) {
lastStartTime_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* optional int64 lastStartTime = 8;
* @return This builder for chaining.
*/
public Builder clearLastStartTime() {
bitField0_ = (bitField0_ & ~0x00000080);
lastStartTime_ = 0L;
onChanged();
return this;
}
private java.lang.Object capability_ = "";
/**
* optional string capability = 9;
* @return Whether the capability field is set.
*/
public boolean hasCapability() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* optional string capability = 9;
* @return The capability.
*/
public java.lang.String getCapability() {
java.lang.Object ref = capability_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
capability_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string capability = 9;
* @return The bytes for capability.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCapabilityBytes() {
java.lang.Object ref = capability_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
capability_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string capability = 9;
* @param value The capability to set.
* @return This builder for chaining.
*/
public Builder setCapability(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
capability_ = value;
bitField0_ |= 0x00000100;
onChanged();
return this;
}
/**
* optional string capability = 9;
* @return This builder for chaining.
*/
public Builder clearCapability() {
capability_ = getDefaultInstance().getCapability();
bitField0_ = (bitField0_ & ~0x00000100);
onChanged();
return this;
}
/**
* optional string capability = 9;
* @param value The bytes for capability to set.
* @return This builder for chaining.
*/
public Builder setCapabilityBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
capability_ = value;
bitField0_ |= 0x00000100;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterInfoProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterInfoProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterInfoProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterInfoProto>() {
@java.lang.Override
public SubClusterInfoProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterInfoProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterInfoProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
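/*
 * Usage sketch (editorial addition, not emitted by protoc): populating a
 * SubClusterInfoProto through the builder methods defined above. The literal
 * values are illustrative, SubClusterIdProto.setId(...) is assumed from the
 * .proto definition rather than shown in this listing, and the capability
 * string's JSON shape is likewise an assumption.
 *
 * SubClusterInfoProto info = SubClusterInfoProto.newBuilder()
 *     .setSubClusterId(SubClusterIdProto.newBuilder().setId("sc-1").build()) // setId assumed
 *     .setAMRMServiceAddress("sc-1-rm:8030")
 *     .setClientRMServiceAddress("sc-1-rm:8032")
 *     .setRMAdminServiceAddress("sc-1-rm:8033")
 *     .setRMWebServiceAddress("sc-1-rm:8088")
 *     .setLastHeartBeat(System.currentTimeMillis())
 *     .setState(SubClusterStateProto.SC_RUNNING)
 *     .setLastStartTime(System.currentTimeMillis())
 *     .setCapability("{\"clusterMetrics\":{}}")
 *     .build();
 */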
public interface SubClusterRegisterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterRegisterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return Whether the subClusterInfo field is set.
*/
boolean hasSubClusterInfo();
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return The subClusterInfo.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfo();
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfoOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterRegisterRequestProto}
*/
public static final class SubClusterRegisterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterRegisterRequestProto)
SubClusterRegisterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterRegisterRequestProto.newBuilder() to construct.
private SubClusterRegisterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterRegisterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterRegisterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_INFO_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto subClusterInfo_;
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return Whether the subClusterInfo field is set.
*/
@java.lang.Override
public boolean hasSubClusterInfo() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return The subClusterInfo.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfo() {
return subClusterInfo_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfoOrBuilder() {
return subClusterInfo_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSubClusterInfo());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getSubClusterInfo());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto) obj;
if (hasSubClusterInfo() != other.hasSubClusterInfo()) return false;
if (hasSubClusterInfo()) {
if (!getSubClusterInfo()
.equals(other.getSubClusterInfo())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterInfo()) {
hash = (37 * hash) + SUB_CLUSTER_INFO_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterInfo().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterRegisterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterRegisterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSubClusterInfoFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterInfo_ = null;
if (subClusterInfoBuilder_ != null) {
subClusterInfoBuilder_.dispose();
subClusterInfoBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterInfo_ = subClusterInfoBuilder_ == null
? subClusterInfo_
: subClusterInfoBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterInfo()) {
mergeSubClusterInfo(other.getSubClusterInfo());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getSubClusterInfoFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto subClusterInfo_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder> subClusterInfoBuilder_;
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return Whether the subClusterInfo field is set.
*/
public boolean hasSubClusterInfo() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return The subClusterInfo.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfo() {
if (subClusterInfoBuilder_ == null) {
return subClusterInfo_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
} else {
return subClusterInfoBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder setSubClusterInfo(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto value) {
if (subClusterInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subClusterInfo_ = value;
} else {
subClusterInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder setSubClusterInfo(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder builderForValue) {
if (subClusterInfoBuilder_ == null) {
subClusterInfo_ = builderForValue.build();
} else {
subClusterInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder mergeSubClusterInfo(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto value) {
if (subClusterInfoBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
subClusterInfo_ != null &&
subClusterInfo_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance()) {
getSubClusterInfoBuilder().mergeFrom(value);
} else {
subClusterInfo_ = value;
}
} else {
subClusterInfoBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder clearSubClusterInfo() {
bitField0_ = (bitField0_ & ~0x00000001);
subClusterInfo_ = null;
if (subClusterInfoBuilder_ != null) {
subClusterInfoBuilder_.dispose();
subClusterInfoBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder getSubClusterInfoBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSubClusterInfoFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfoOrBuilder() {
if (subClusterInfoBuilder_ != null) {
return subClusterInfoBuilder_.getMessageOrBuilder();
} else {
return subClusterInfo_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
}
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>
getSubClusterInfoFieldBuilder() {
if (subClusterInfoBuilder_ == null) {
subClusterInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>(
getSubClusterInfo(),
getParentForChildren(),
isClean());
subClusterInfo_ = null;
}
return subClusterInfoBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterRegisterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterRegisterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterRegisterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterRegisterRequestProto>() {
@java.lang.Override
public SubClusterRegisterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterRegisterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterRegisterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
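/*
 * Round-trip sketch (editorial addition): wrapping the SubClusterInfoProto built
 * in the previous sketch in a SubClusterRegisterRequestProto, then re-parsing it
 * with the generated parseFrom(byte[]) shown above. toByteArray() comes from the
 * shaded protobuf MessageLite API rather than this listing.
 *
 * SubClusterRegisterRequestProto request = SubClusterRegisterRequestProto.newBuilder()
 *     .setSubClusterInfo(info)
 *     .build();
 * byte[] wire = request.toByteArray();
 * SubClusterRegisterRequestProto parsed = SubClusterRegisterRequestProto.parseFrom(wire);
 * boolean running = parsed.getSubClusterInfo().getState() == SubClusterStateProto.SC_RUNNING;
 */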
public interface SubClusterRegisterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterRegisterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterRegisterResponseProto}
*/
public static final class SubClusterRegisterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterRegisterResponseProto)
SubClusterRegisterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterRegisterResponseProto.newBuilder() to construct.
private SubClusterRegisterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterRegisterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterRegisterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterRegisterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterRegisterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterRegisterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterRegisterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterRegisterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterRegisterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterRegisterResponseProto>() {
@java.lang.Override
public SubClusterRegisterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterRegisterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterRegisterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterRegisterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
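// Usage sketch (not part of the generated file): SubClusterRegisterResponseProto declares no
// fields, so parsing an empty payload yields a message equal to the shared default instance.
// The helper name isEmptyRegisterResponse is illustrative only; parseFrom(byte[]) is the
// standard generated entry point for messages of this type.
private static boolean isEmptyRegisterResponse(byte[] wire)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  SubClusterRegisterResponseProto response = SubClusterRegisterResponseProto.parseFrom(wire);
  return response.equals(SubClusterRegisterResponseProto.getDefaultInstance());
}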
public interface SubClusterHeartbeatRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterHeartbeatRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder();
/**
* optional int64 lastHeartBeat = 2;
* @return Whether the lastHeartBeat field is set.
*/
boolean hasLastHeartBeat();
/**
* optional int64 lastHeartBeat = 2;
* @return The lastHeartBeat.
*/
long getLastHeartBeat();
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @return Whether the state field is set.
*/
boolean hasState();
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @return The state.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState();
/**
* optional string capability = 4;
* @return Whether the capability field is set.
*/
boolean hasCapability();
/**
* optional string capability = 4;
* @return The capability.
*/
java.lang.String getCapability();
/**
* optional string capability = 4;
* @return The bytes for capability.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getCapabilityBytes();
}
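// Usage sketch (not part of the generated file): the OrBuilder view declared above is
// implemented by both the immutable SubClusterHeartbeatRequestProto and its Builder, so
// read-only code can accept either. The helper name describeHeartbeat is illustrative only.
private static java.lang.String describeHeartbeat(SubClusterHeartbeatRequestProtoOrBuilder heartbeat) {
  java.lang.StringBuilder summary = new java.lang.StringBuilder();
  // Each optional field pairs a hasXxx() presence check with a getXxx() accessor.
  if (heartbeat.hasState()) {
    summary.append("state=").append(heartbeat.getState());
  }
  if (heartbeat.hasLastHeartBeat()) {
    summary.append(" lastHeartBeat=").append(heartbeat.getLastHeartBeat());
  }
  if (heartbeat.hasCapability()) {
    summary.append(" capability=").append(heartbeat.getCapability());
  }
  return summary.toString().trim();
}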
/**
* Protobuf type {@code hadoop.yarn.SubClusterHeartbeatRequestProto}
*/
public static final class SubClusterHeartbeatRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterHeartbeatRequestProto)
SubClusterHeartbeatRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterHeartbeatRequestProto.newBuilder() to construct.
private SubClusterHeartbeatRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterHeartbeatRequestProto() {
state_ = 1;
capability_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterHeartbeatRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
public static final int LASTHEARTBEAT_FIELD_NUMBER = 2;
private long lastHeartBeat_ = 0L;
/**
* optional int64 lastHeartBeat = 2;
* @return Whether the lastHeartBeat field is set.
*/
@java.lang.Override
public boolean hasLastHeartBeat() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int64 lastHeartBeat = 2;
* @return The lastHeartBeat.
*/
@java.lang.Override
public long getLastHeartBeat() {
return lastHeartBeat_;
}
public static final int STATE_FIELD_NUMBER = 3;
private int state_ = 1;
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @return The state.
*/
@java.lang.Override public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto result = org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(state_);
return result == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.SC_NEW : result;
}
public static final int CAPABILITY_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object capability_ = "";
/**
* optional string capability = 4;
* @return Whether the capability field is set.
*/
@java.lang.Override
public boolean hasCapability() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string capability = 4;
* @return The capability.
*/
@java.lang.Override
public java.lang.String getCapability() {
java.lang.Object ref = capability_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
capability_ = s;
}
return s;
}
}
/**
* optional string capability = 4;
* @return The bytes for capability.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCapabilityBytes() {
java.lang.Object ref = capability_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
capability_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSubClusterId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt64(2, lastHeartBeat_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeEnum(3, state_);
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, capability_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getSubClusterId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(2, lastHeartBeat_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(3, state_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, capability_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (hasLastHeartBeat() != other.hasLastHeartBeat()) return false;
if (hasLastHeartBeat()) {
if (getLastHeartBeat()
!= other.getLastHeartBeat()) return false;
}
if (hasState() != other.hasState()) return false;
if (hasState()) {
if (state_ != other.state_) return false;
}
if (hasCapability() != other.hasCapability()) return false;
if (hasCapability()) {
if (!getCapability()
.equals(other.getCapability())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
if (hasLastHeartBeat()) {
hash = (37 * hash) + LASTHEARTBEAT_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getLastHeartBeat());
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + state_;
}
if (hasCapability()) {
hash = (37 * hash) + CAPABILITY_FIELD_NUMBER;
hash = (53 * hash) + getCapability().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterHeartbeatRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterHeartbeatRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSubClusterIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
lastHeartBeat_ = 0L;
state_ = 1;
capability_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterIdBuilder_ == null
? subClusterId_
: subClusterIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.lastHeartBeat_ = lastHeartBeat_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.state_ = state_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.capability_ = capability_;
to_bitField0_ |= 0x00000008;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
mergeSubClusterId(other.getSubClusterId());
}
if (other.hasLastHeartBeat()) {
setLastHeartBeat(other.getLastHeartBeat());
}
if (other.hasState()) {
setState(other.getState());
}
if (other.hasCapability()) {
capability_ = other.capability_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getSubClusterIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
lastHeartBeat_ = input.readInt64();
bitField0_ |= 0x00000002;
break;
} // case 16
case 24: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto tmpValue =
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(3, tmpRaw);
} else {
state_ = tmpRaw;
bitField0_ |= 0x00000004;
}
break;
} // case 24
case 34: {
capability_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 34
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> subClusterIdBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
if (subClusterIdBuilder_ == null) {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
} else {
return subClusterIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subClusterId_ = value;
} else {
subClusterIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (subClusterIdBuilder_ == null) {
subClusterId_ = builderForValue.build();
} else {
subClusterIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder mergeSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
subClusterId_ != null &&
subClusterId_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getSubClusterIdBuilder().mergeFrom(value);
} else {
subClusterId_ = value;
}
} else {
subClusterIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder clearSubClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getSubClusterIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSubClusterIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
if (subClusterIdBuilder_ != null) {
return subClusterIdBuilder_.getMessageOrBuilder();
} else {
return subClusterId_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getSubClusterIdFieldBuilder() {
if (subClusterIdBuilder_ == null) {
subClusterIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getSubClusterId(),
getParentForChildren(),
isClean());
subClusterId_ = null;
}
return subClusterIdBuilder_;
}
private long lastHeartBeat_ ;
/**
* optional int64 lastHeartBeat = 2;
* @return Whether the lastHeartBeat field is set.
*/
@java.lang.Override
public boolean hasLastHeartBeat() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int64 lastHeartBeat = 2;
* @return The lastHeartBeat.
*/
@java.lang.Override
public long getLastHeartBeat() {
return lastHeartBeat_;
}
/**
* optional int64 lastHeartBeat = 2;
* @param value The lastHeartBeat to set.
* @return This builder for chaining.
*/
public Builder setLastHeartBeat(long value) {
lastHeartBeat_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional int64 lastHeartBeat = 2;
* @return This builder for chaining.
*/
public Builder clearLastHeartBeat() {
bitField0_ = (bitField0_ & ~0x00000002);
lastHeartBeat_ = 0L;
onChanged();
return this;
}
private int state_ = 1;
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @return The state.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto result = org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(state_);
return result == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.SC_NEW : result;
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @param value The state to set.
* @return This builder for chaining.
*/
public Builder setState(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
state_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 3;
* @return This builder for chaining.
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000004);
state_ = 1;
onChanged();
return this;
}
private java.lang.Object capability_ = "";
/**
* optional string capability = 4;
* @return Whether the capability field is set.
*/
public boolean hasCapability() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string capability = 4;
* @return The capability.
*/
public java.lang.String getCapability() {
java.lang.Object ref = capability_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
capability_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string capability = 4;
* @return The bytes for capability.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCapabilityBytes() {
java.lang.Object ref = capability_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
capability_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string capability = 4;
* @param value The capability to set.
* @return This builder for chaining.
*/
public Builder setCapability(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
capability_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional string capability = 4;
* @return This builder for chaining.
*/
public Builder clearCapability() {
capability_ = getDefaultInstance().getCapability();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
* optional string capability = 4;
* @param value The bytes for capability to set.
* @return This builder for chaining.
*/
public Builder setCapabilityBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
capability_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterHeartbeatRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterHeartbeatRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterHeartbeatRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterHeartbeatRequestProto>() {
@java.lang.Override
public SubClusterHeartbeatRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterHeartbeatRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterHeartbeatRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
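// Usage sketch (not part of the generated file): building a SubClusterHeartbeatRequestProto and
// round-tripping it through the wire format. The id value "SC-1", the capability string, and the
// helper name exampleHeartbeatRequest are arbitrary examples; it is assumed here that the
// generated SubClusterIdProto builder exposes setId(String) for its id field.
private static SubClusterHeartbeatRequestProto exampleHeartbeatRequest()
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  SubClusterHeartbeatRequestProto request = SubClusterHeartbeatRequestProto.newBuilder()
      .setSubClusterId(SubClusterIdProto.newBuilder().setId("SC-1"))
      .setLastHeartBeat(System.currentTimeMillis())
      .setState(SubClusterStateProto.SC_RUNNING)
      .setCapability("{\"memory\":1024}")
      .build();
  // Serialize and parse back to exercise the generated parseFrom(byte[]) entry point.
  byte[] wire = request.toByteArray();
  return SubClusterHeartbeatRequestProto.parseFrom(wire);
}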
public interface SubClusterHeartbeatResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterHeartbeatResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterHeartbeatResponseProto}
*/
public static final class SubClusterHeartbeatResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterHeartbeatResponseProto)
SubClusterHeartbeatResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterHeartbeatResponseProto.newBuilder() to construct.
private SubClusterHeartbeatResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterHeartbeatResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterHeartbeatResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterHeartbeatResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterHeartbeatResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterHeartbeatResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterHeartbeatResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterHeartbeatResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterHeartbeatResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterHeartbeatResponseProto>() {
@java.lang.Override
public SubClusterHeartbeatResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterHeartbeatResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterHeartbeatResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterHeartbeatResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
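// Usage sketch (not part of the generated file): writing and reading a length-delimited
// SubClusterHeartbeatResponseProto, the framing used when several messages share one stream.
// The helper name roundTripHeartbeatResponse is illustrative only.
private static SubClusterHeartbeatResponseProto roundTripHeartbeatResponse()
    throws java.io.IOException {
  java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
  // The message has no fields, so the default instance is the natural payload to send.
  SubClusterHeartbeatResponseProto.getDefaultInstance().writeDelimitedTo(out);
  java.io.ByteArrayInputStream in = new java.io.ByteArrayInputStream(out.toByteArray());
  return SubClusterHeartbeatResponseProto.parseDelimitedFrom(in);
}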
public interface SubClusterDeregisterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterDeregisterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder();
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @return Whether the state field is set.
*/
boolean hasState();
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @return The state.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState();
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterDeregisterRequestProto}
*/
public static final class SubClusterDeregisterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterDeregisterRequestProto)
SubClusterDeregisterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterDeregisterRequestProto.newBuilder() to construct.
private SubClusterDeregisterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterDeregisterRequestProto() {
state_ = 1;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterDeregisterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
public static final int STATE_FIELD_NUMBER = 2;
private int state_ = 1;
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @return The state.
*/
@java.lang.Override public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto result = org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(state_);
return result == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.SC_NEW : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSubClusterId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, state_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getSubClusterId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, state_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (hasState() != other.hasState()) return false;
if (hasState()) {
if (state_ != other.state_) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + state_;
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterDeregisterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterDeregisterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSubClusterIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
state_ = 1;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterIdBuilder_ == null
? subClusterId_
: subClusterIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.state_ = state_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
mergeSubClusterId(other.getSubClusterId());
}
if (other.hasState()) {
setState(other.getState());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getSubClusterIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto tmpValue =
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(2, tmpRaw);
} else {
state_ = tmpRaw;
bitField0_ |= 0x00000002;
}
break;
} // case 16
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> subClusterIdBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
if (subClusterIdBuilder_ == null) {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
} else {
return subClusterIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subClusterId_ = value;
} else {
subClusterIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (subClusterIdBuilder_ == null) {
subClusterId_ = builderForValue.build();
} else {
subClusterIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder mergeSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
subClusterId_ != null &&
subClusterId_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getSubClusterIdBuilder().mergeFrom(value);
} else {
subClusterId_ = value;
}
} else {
subClusterIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder clearSubClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getSubClusterIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSubClusterIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
if (subClusterIdBuilder_ != null) {
return subClusterIdBuilder_.getMessageOrBuilder();
} else {
return subClusterId_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getSubClusterIdFieldBuilder() {
if (subClusterIdBuilder_ == null) {
subClusterIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getSubClusterId(),
getParentForChildren(),
isClean());
subClusterId_ = null;
}
return subClusterIdBuilder_;
}
private int state_ = 1;
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @return Whether the state field is set.
*/
@java.lang.Override public boolean hasState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @return The state.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto getState() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto result = org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.forNumber(state_);
return result == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto.SC_NEW : result;
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @param value The state to set.
* @return This builder for chaining.
*/
public Builder setState(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
state_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterStateProto state = 2;
* @return This builder for chaining.
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000002);
state_ = 1;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterDeregisterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterDeregisterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterDeregisterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterDeregisterRequestProto>() {
@java.lang.Override
public SubClusterDeregisterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterDeregisterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterDeregisterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
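// --- Editor's illustrative usage sketch (not part of the protoc output) ------
// Demonstrates building a deregister request with the generated Builder and
// parsing it back from bytes. SubClusterIdProto.Builder.setId(String) is
// assumed from the upstream yarn_server_federation_protos.proto definition,
// and the id value "SC-1" is purely hypothetical.
public static SubClusterDeregisterRequestProto exampleDeregisterRequestRoundTrip()
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  SubClusterDeregisterRequestProto request = SubClusterDeregisterRequestProto.newBuilder()
      .setSubClusterId(SubClusterIdProto.newBuilder().setId("SC-1").build())
      .setState(SubClusterStateProto.SC_DECOMMISSIONED)  // enum defined in this file
      .build();
  byte[] wire = request.toByteArray();
  return SubClusterDeregisterRequestProto.parseFrom(wire);
}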
public interface SubClusterDeregisterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterDeregisterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterDeregisterResponseProto}
*/
public static final class SubClusterDeregisterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterDeregisterResponseProto)
SubClusterDeregisterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterDeregisterResponseProto.newBuilder() to construct.
private SubClusterDeregisterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterDeregisterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterDeregisterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterDeregisterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterDeregisterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterDeregisterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterDeregisterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterDeregisterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterDeregisterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterDeregisterResponseProto>() {
@java.lang.Override
public SubClusterDeregisterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterDeregisterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterDeregisterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterDeregisterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
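// --- Editor's illustrative usage sketch (not part of the protoc output) ------
// Shows length-delimited framing, matching the generated parseDelimitedFrom(...)
// helper above: writeDelimitedTo prefixes the payload with its varint size, so
// several messages can share a single stream.
public static SubClusterDeregisterResponseProto exampleDelimitedRoundTrip()
    throws java.io.IOException {
  java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
  SubClusterDeregisterResponseProto.getDefaultInstance().writeDelimitedTo(out);
  java.io.ByteArrayInputStream in =
      new java.io.ByteArrayInputStream(out.toByteArray());
  return SubClusterDeregisterResponseProto.parseDelimitedFrom(in);
}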
public interface GetSubClusterInfoRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetSubClusterInfoRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterInfoRequestProto}
*/
public static final class GetSubClusterInfoRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetSubClusterInfoRequestProto)
GetSubClusterInfoRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSubClusterInfoRequestProto.newBuilder() to construct.
private GetSubClusterInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSubClusterInfoRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetSubClusterInfoRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSubClusterId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getSubClusterId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterInfoRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetSubClusterInfoRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSubClusterIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterIdBuilder_ == null
? subClusterId_
: subClusterIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
mergeSubClusterId(other.getSubClusterId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getSubClusterIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> subClusterIdBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
if (subClusterIdBuilder_ == null) {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
} else {
return subClusterIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subClusterId_ = value;
} else {
subClusterIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (subClusterIdBuilder_ == null) {
subClusterId_ = builderForValue.build();
} else {
subClusterIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder mergeSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
subClusterId_ != null &&
subClusterId_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getSubClusterIdBuilder().mergeFrom(value);
} else {
subClusterId_ = value;
}
} else {
subClusterIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder clearSubClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getSubClusterIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSubClusterIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
if (subClusterIdBuilder_ != null) {
return subClusterIdBuilder_.getMessageOrBuilder();
} else {
return subClusterId_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getSubClusterIdFieldBuilder() {
if (subClusterIdBuilder_ == null) {
subClusterIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getSubClusterId(),
getParentForChildren(),
isClean());
subClusterId_ = null;
}
return subClusterIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetSubClusterInfoRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetSubClusterInfoRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterInfoRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetSubClusterInfoRequestProto>() {
@java.lang.Override
public GetSubClusterInfoRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterInfoRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterInfoRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
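// Illustrative usage (not part of the generated file): a minimal sketch of how a
// GetSubClusterInfoRequestProto is typically assembled through its builder and round-tripped
// over the wire. It assumes SubClusterIdProto exposes a setId(String) setter for its "id"
// field, as generated elsewhere in this file; adjust if the field name differs.
//
//   YarnServerFederationProtos.SubClusterIdProto subClusterId =
//       YarnServerFederationProtos.SubClusterIdProto.newBuilder()
//           .setId("SC-1")                              // hypothetical sub-cluster id value
//           .build();
//   YarnServerFederationProtos.GetSubClusterInfoRequestProto request =
//       YarnServerFederationProtos.GetSubClusterInfoRequestProto.newBuilder()
//           .setSubClusterId(subClusterId)
//           .build();
//   byte[] wireBytes = request.toByteArray();           // serialize for the RPC layer
//   YarnServerFederationProtos.GetSubClusterInfoRequestProto roundTripped =
//       YarnServerFederationProtos.GetSubClusterInfoRequestProto.parseFrom(wireBytes);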
public interface GetSubClusterInfoResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetSubClusterInfoResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return Whether the subClusterInfo field is set.
*/
boolean hasSubClusterInfo();
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return The subClusterInfo.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfo();
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfoOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterInfoResponseProto}
*/
public static final class GetSubClusterInfoResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetSubClusterInfoResponseProto)
GetSubClusterInfoResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSubClusterInfoResponseProto.newBuilder() to construct.
private GetSubClusterInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSubClusterInfoResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetSubClusterInfoResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_INFO_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto subClusterInfo_;
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return Whether the subClusterInfo field is set.
*/
@java.lang.Override
public boolean hasSubClusterInfo() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return The subClusterInfo.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfo() {
return subClusterInfo_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfoOrBuilder() {
return subClusterInfo_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSubClusterInfo());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getSubClusterInfo());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto) obj;
if (hasSubClusterInfo() != other.hasSubClusterInfo()) return false;
if (hasSubClusterInfo()) {
if (!getSubClusterInfo()
.equals(other.getSubClusterInfo())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterInfo()) {
hash = (37 * hash) + SUB_CLUSTER_INFO_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterInfo().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterInfoResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetSubClusterInfoResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSubClusterInfoFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterInfo_ = null;
if (subClusterInfoBuilder_ != null) {
subClusterInfoBuilder_.dispose();
subClusterInfoBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterInfoResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterInfo_ = subClusterInfoBuilder_ == null
? subClusterInfo_
: subClusterInfoBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto.getDefaultInstance()) return this;
if (other.hasSubClusterInfo()) {
mergeSubClusterInfo(other.getSubClusterInfo());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getSubClusterInfoFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto subClusterInfo_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder> subClusterInfoBuilder_;
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return Whether the subClusterInfo field is set.
*/
public boolean hasSubClusterInfo() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
* @return The subClusterInfo.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfo() {
if (subClusterInfoBuilder_ == null) {
return subClusterInfo_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
} else {
return subClusterInfoBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder setSubClusterInfo(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto value) {
if (subClusterInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subClusterInfo_ = value;
} else {
subClusterInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder setSubClusterInfo(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder builderForValue) {
if (subClusterInfoBuilder_ == null) {
subClusterInfo_ = builderForValue.build();
} else {
subClusterInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder mergeSubClusterInfo(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto value) {
if (subClusterInfoBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
subClusterInfo_ != null &&
subClusterInfo_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance()) {
getSubClusterInfoBuilder().mergeFrom(value);
} else {
subClusterInfo_ = value;
}
} else {
subClusterInfoBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public Builder clearSubClusterInfo() {
bitField0_ = (bitField0_ & ~0x00000001);
subClusterInfo_ = null;
if (subClusterInfoBuilder_ != null) {
subClusterInfoBuilder_.dispose();
subClusterInfoBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder getSubClusterInfoBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSubClusterInfoFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfoOrBuilder() {
if (subClusterInfoBuilder_ != null) {
return subClusterInfoBuilder_.getMessageOrBuilder();
} else {
return subClusterInfo_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance() : subClusterInfo_;
}
}
/**
* optional .hadoop.yarn.SubClusterInfoProto sub_cluster_info = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>
getSubClusterInfoFieldBuilder() {
if (subClusterInfoBuilder_ == null) {
subClusterInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>(
getSubClusterInfo(),
getParentForChildren(),
isClean());
subClusterInfo_ = null;
}
return subClusterInfoBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetSubClusterInfoResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetSubClusterInfoResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterInfoResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetSubClusterInfoResponseProto>() {
@java.lang.Override
public GetSubClusterInfoResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterInfoResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterInfoResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterInfoResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
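// Illustrative usage (not part of the generated file): reading a GetSubClusterInfoResponseProto.
// Because sub_cluster_info is optional, hasSubClusterInfo() should be checked before
// getSubClusterInfo(), which otherwise returns SubClusterInfoProto.getDefaultInstance().
// "wireBytes" below is a hypothetical byte[] received from the RPC layer.
//
//   YarnServerFederationProtos.GetSubClusterInfoResponseProto response =
//       YarnServerFederationProtos.GetSubClusterInfoResponseProto.parseFrom(wireBytes);
//   if (response.hasSubClusterInfo()) {
//     YarnServerFederationProtos.SubClusterInfoProto info = response.getSubClusterInfo();
//     // consume the returned sub-cluster record here
//   }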
public interface GetSubClustersInfoRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetSubClustersInfoRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @return Whether the filterInactiveSubclusters field is set.
*/
boolean hasFilterInactiveSubclusters();
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @return The filterInactiveSubclusters.
*/
boolean getFilterInactiveSubclusters();
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClustersInfoRequestProto}
*/
public static final class GetSubClustersInfoRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetSubClustersInfoRequestProto)
GetSubClustersInfoRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSubClustersInfoRequestProto.newBuilder() to construct.
private GetSubClustersInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSubClustersInfoRequestProto() {
filterInactiveSubclusters_ = true;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetSubClustersInfoRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto.Builder.class);
}
private int bitField0_;
public static final int FILTER_INACTIVE_SUBCLUSTERS_FIELD_NUMBER = 1;
private boolean filterInactiveSubclusters_ = true;
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @return Whether the filterInactiveSubclusters field is set.
*/
@java.lang.Override
public boolean hasFilterInactiveSubclusters() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @return The filterInactiveSubclusters.
*/
@java.lang.Override
public boolean getFilterInactiveSubclusters() {
return filterInactiveSubclusters_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeBool(1, filterInactiveSubclusters_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(1, filterInactiveSubclusters_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto) obj;
if (hasFilterInactiveSubclusters() != other.hasFilterInactiveSubclusters()) return false;
if (hasFilterInactiveSubclusters()) {
if (getFilterInactiveSubclusters()
!= other.getFilterInactiveSubclusters()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasFilterInactiveSubclusters()) {
hash = (37 * hash) + FILTER_INACTIVE_SUBCLUSTERS_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getFilterInactiveSubclusters());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClustersInfoRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetSubClustersInfoRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
filterInactiveSubclusters_ = true;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.filterInactiveSubclusters_ = filterInactiveSubclusters_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto.getDefaultInstance()) return this;
if (other.hasFilterInactiveSubclusters()) {
setFilterInactiveSubclusters(other.getFilterInactiveSubclusters());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
filterInactiveSubclusters_ = input.readBool();
bitField0_ |= 0x00000001;
break;
} // case 8
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private boolean filterInactiveSubclusters_ = true;
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @return Whether the filterInactiveSubclusters field is set.
*/
@java.lang.Override
public boolean hasFilterInactiveSubclusters() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @return The filterInactiveSubclusters.
*/
@java.lang.Override
public boolean getFilterInactiveSubclusters() {
return filterInactiveSubclusters_;
}
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @param value The filterInactiveSubclusters to set.
* @return This builder for chaining.
*/
public Builder setFilterInactiveSubclusters(boolean value) {
filterInactiveSubclusters_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional bool filter_inactive_subclusters = 1 [default = true];
* @return This builder for chaining.
*/
public Builder clearFilterInactiveSubclusters() {
bitField0_ = (bitField0_ & ~0x00000001);
filterInactiveSubclusters_ = true;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetSubClustersInfoRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetSubClustersInfoRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClustersInfoRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetSubClustersInfoRequestProto>() {
@java.lang.Override
public GetSubClustersInfoRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClustersInfoRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClustersInfoRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
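// Illustrative usage (not part of the generated file): building a GetSubClustersInfoRequestProto.
// filter_inactive_subclusters defaults to true, so an empty builder already requests only
// active sub-clusters; set it to false explicitly to list every registered sub-cluster.
//
//   YarnServerFederationProtos.GetSubClustersInfoRequestProto allSubClusters =
//       YarnServerFederationProtos.GetSubClustersInfoRequestProto.newBuilder()
//           .setFilterInactiveSubclusters(false)
//           .build();
//   YarnServerFederationProtos.GetSubClustersInfoRequestProto activeOnly =
//       YarnServerFederationProtos.GetSubClustersInfoRequestProto.getDefaultInstance();
//   boolean filter = activeOnly.getFilterInactiveSubclusters();   // true (field default)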
public interface GetSubClustersInfoResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetSubClustersInfoResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto>
getSubClusterInfosList();
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfos(int index);
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
int getSubClusterInfosCount();
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>
getSubClusterInfosOrBuilderList();
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfosOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClustersInfoResponseProto}
*/
public static final class GetSubClustersInfoResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetSubClustersInfoResponseProto)
GetSubClustersInfoResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSubClustersInfoResponseProto.newBuilder() to construct.
private GetSubClustersInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSubClustersInfoResponseProto() {
subClusterInfos_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetSubClustersInfoResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto.Builder.class);
}
public static final int SUB_CLUSTER_INFOS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto> subClusterInfos_;
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto> getSubClusterInfosList() {
return subClusterInfos_;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>
getSubClusterInfosOrBuilderList() {
return subClusterInfos_;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
@java.lang.Override
public int getSubClusterInfosCount() {
return subClusterInfos_.size();
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfos(int index) {
return subClusterInfos_.get(index);
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfosOrBuilder(
int index) {
return subClusterInfos_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < subClusterInfos_.size(); i++) {
output.writeMessage(1, subClusterInfos_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < subClusterInfos_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, subClusterInfos_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto) obj;
if (!getSubClusterInfosList()
.equals(other.getSubClusterInfosList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSubClusterInfosCount() > 0) {
hash = (37 * hash) + SUB_CLUSTER_INFOS_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterInfosList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClustersInfoResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetSubClustersInfoResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (subClusterInfosBuilder_ == null) {
subClusterInfos_ = java.util.Collections.emptyList();
} else {
subClusterInfos_ = null;
subClusterInfosBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClustersInfoResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto result) {
if (subClusterInfosBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
subClusterInfos_ = java.util.Collections.unmodifiableList(subClusterInfos_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.subClusterInfos_ = subClusterInfos_;
} else {
result.subClusterInfos_ = subClusterInfosBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto.getDefaultInstance()) return this;
if (subClusterInfosBuilder_ == null) {
if (!other.subClusterInfos_.isEmpty()) {
if (subClusterInfos_.isEmpty()) {
subClusterInfos_ = other.subClusterInfos_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSubClusterInfosIsMutable();
subClusterInfos_.addAll(other.subClusterInfos_);
}
onChanged();
}
} else {
if (!other.subClusterInfos_.isEmpty()) {
if (subClusterInfosBuilder_.isEmpty()) {
subClusterInfosBuilder_.dispose();
subClusterInfosBuilder_ = null;
subClusterInfos_ = other.subClusterInfos_;
bitField0_ = (bitField0_ & ~0x00000001);
subClusterInfosBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getSubClusterInfosFieldBuilder() : null;
} else {
subClusterInfosBuilder_.addAllMessages(other.subClusterInfos_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto m =
input.readMessage(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.PARSER,
extensionRegistry);
if (subClusterInfosBuilder_ == null) {
ensureSubClusterInfosIsMutable();
subClusterInfos_.add(m);
} else {
subClusterInfosBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto> subClusterInfos_ =
java.util.Collections.emptyList();
private void ensureSubClusterInfosIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
subClusterInfos_ = new java.util.ArrayList<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto>(subClusterInfos_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder> subClusterInfosBuilder_;
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto> getSubClusterInfosList() {
if (subClusterInfosBuilder_ == null) {
return java.util.Collections.unmodifiableList(subClusterInfos_);
} else {
return subClusterInfosBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public int getSubClusterInfosCount() {
if (subClusterInfosBuilder_ == null) {
return subClusterInfos_.size();
} else {
return subClusterInfosBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto getSubClusterInfos(int index) {
if (subClusterInfosBuilder_ == null) {
return subClusterInfos_.get(index);
} else {
return subClusterInfosBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder setSubClusterInfos(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto value) {
if (subClusterInfosBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSubClusterInfosIsMutable();
subClusterInfos_.set(index, value);
onChanged();
} else {
subClusterInfosBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder setSubClusterInfos(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder builderForValue) {
if (subClusterInfosBuilder_ == null) {
ensureSubClusterInfosIsMutable();
subClusterInfos_.set(index, builderForValue.build());
onChanged();
} else {
subClusterInfosBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder addSubClusterInfos(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto value) {
if (subClusterInfosBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSubClusterInfosIsMutable();
subClusterInfos_.add(value);
onChanged();
} else {
subClusterInfosBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder addSubClusterInfos(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto value) {
if (subClusterInfosBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSubClusterInfosIsMutable();
subClusterInfos_.add(index, value);
onChanged();
} else {
subClusterInfosBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder addSubClusterInfos(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder builderForValue) {
if (subClusterInfosBuilder_ == null) {
ensureSubClusterInfosIsMutable();
subClusterInfos_.add(builderForValue.build());
onChanged();
} else {
subClusterInfosBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder addSubClusterInfos(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder builderForValue) {
if (subClusterInfosBuilder_ == null) {
ensureSubClusterInfosIsMutable();
subClusterInfos_.add(index, builderForValue.build());
onChanged();
} else {
subClusterInfosBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder addAllSubClusterInfos(
java.lang.Iterable<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto> values) {
if (subClusterInfosBuilder_ == null) {
ensureSubClusterInfosIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, subClusterInfos_);
onChanged();
} else {
subClusterInfosBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder clearSubClusterInfos() {
if (subClusterInfosBuilder_ == null) {
subClusterInfos_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
subClusterInfosBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public Builder removeSubClusterInfos(int index) {
if (subClusterInfosBuilder_ == null) {
ensureSubClusterInfosIsMutable();
subClusterInfos_.remove(index);
onChanged();
} else {
subClusterInfosBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder getSubClusterInfosBuilder(
int index) {
return getSubClusterInfosFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder getSubClusterInfosOrBuilder(
int index) {
if (subClusterInfosBuilder_ == null) {
return subClusterInfos_.get(index); } else {
return subClusterInfosBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>
getSubClusterInfosOrBuilderList() {
if (subClusterInfosBuilder_ != null) {
return subClusterInfosBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(subClusterInfos_);
}
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder addSubClusterInfosBuilder() {
return getSubClusterInfosFieldBuilder().addBuilder(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder addSubClusterInfosBuilder(
int index) {
return getSubClusterInfosFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.SubClusterInfoProto sub_cluster_infos = 1;
*/
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder>
getSubClusterInfosBuilderList() {
return getSubClusterInfosFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>
getSubClusterInfosFieldBuilder() {
if (subClusterInfosBuilder_ == null) {
subClusterInfosBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder>(
subClusterInfos_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
subClusterInfos_ = null;
}
return subClusterInfosBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetSubClustersInfoResponseProto)
}
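// Illustrative sketch, not part of the protoc-generated output: the
// Builder.mergeFrom(GetSubClustersInfoResponseProto) method above concatenates
// the repeated sub_cluster_infos field instead of replacing it, so merging two
// responses yields the union of their entries. Both parameters are hypothetical
// responses supplied by a caller.
private static GetSubClustersInfoResponseProto mergeResponsesExample(
GetSubClustersInfoResponseProto first,
GetSubClustersInfoResponseProto second) {
return GetSubClustersInfoResponseProto.newBuilder()
.mergeFrom(first)   // copies first.sub_cluster_infos
.mergeFrom(second)  // appends second.sub_cluster_infos
.build();
}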
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetSubClustersInfoResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClustersInfoResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetSubClustersInfoResponseProto>() {
@java.lang.Override
public GetSubClustersInfoResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClustersInfoResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClustersInfoResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClustersInfoResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
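// Illustrative sketch, not part of the protoc-generated output: a minimal round
// trip through the generated GetSubClustersInfoResponseProto serialization API.
// toByteArray() comes from the protobuf message base class, and the
// SubClusterInfoProto default instance is only a placeholder entry.
private static GetSubClustersInfoResponseProto getSubClustersRoundTripExample()
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
GetSubClustersInfoResponseProto response = GetSubClustersInfoResponseProto.newBuilder()
.addSubClusterInfos(SubClusterInfoProto.getDefaultInstance())
.build();
byte[] wire = response.toByteArray();                    // serialize to wire format
return GetSubClustersInfoResponseProto.parseFrom(wire);  // parse it back
}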
public interface ApplicationHomeSubClusterProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationHomeSubClusterProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
boolean hasApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return Whether the homeSubCluster field is set.
*/
boolean hasHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return The homeSubCluster.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder();
/**
* optional int64 create_time = 3;
* @return Whether the createTime field is set.
*/
boolean hasCreateTime();
/**
* optional int64 create_time = 3;
* @return The createTime.
*/
long getCreateTime();
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
* @return Whether the appSubmitContext field is set.
*/
boolean hasAppSubmitContext();
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
* @return The appSubmitContext.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getAppSubmitContext();
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getAppSubmitContextOrBuilder();
}
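// Illustrative sketch, not part of the protoc-generated output: populating the
// optional fields declared by ApplicationHomeSubClusterProtoOrBuilder above.
// Default instances stand in for a real application id and home sub-cluster id,
// and the create_time value is an arbitrary example timestamp.
private static ApplicationHomeSubClusterProto applicationHomeSubClusterExample() {
return ApplicationHomeSubClusterProto.newBuilder()
.setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance())
.setHomeSubCluster(SubClusterIdProto.getDefaultInstance())
.setCreateTime(System.currentTimeMillis())  // create_time is a plain int64
.build();
}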
/**
* Protobuf type {@code hadoop.yarn.ApplicationHomeSubClusterProto}
*/
public static final class ApplicationHomeSubClusterProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationHomeSubClusterProto)
ApplicationHomeSubClusterProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ApplicationHomeSubClusterProto.newBuilder() to construct.
private ApplicationHomeSubClusterProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ApplicationHomeSubClusterProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ApplicationHomeSubClusterProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ApplicationHomeSubClusterProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ApplicationHomeSubClusterProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder.class);
}
private int bitField0_;
public static final int APPLICATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
@java.lang.Override
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
public static final int HOME_SUB_CLUSTER_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return Whether the homeSubCluster field is set.
*/
@java.lang.Override
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return The homeSubCluster.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
public static final int CREATE_TIME_FIELD_NUMBER = 3;
private long createTime_ = 0L;
/**
* optional int64 create_time = 3;
* @return Whether the createTime field is set.
*/
@java.lang.Override
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional int64 create_time = 3;
* @return The createTime.
*/
@java.lang.Override
public long getCreateTime() {
return createTime_;
}
public static final int APP_SUBMIT_CONTEXT_FIELD_NUMBER = 4;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto appSubmitContext_;
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
* @return Whether the appSubmitContext field is set.
*/
@java.lang.Override
public boolean hasAppSubmitContext() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
* @return The appSubmitContext.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getAppSubmitContext() {
return appSubmitContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : appSubmitContext_;
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getAppSubmitContextOrBuilder() {
return appSubmitContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : appSubmitContext_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasAppSubmitContext()) {
if (!getAppSubmitContext().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getApplicationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getHomeSubCluster());
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeInt64(3, createTime_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(4, getAppSubmitContext());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getApplicationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getHomeSubCluster());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(3, createTime_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, getAppSubmitContext());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto) obj;
if (hasApplicationId() != other.hasApplicationId()) return false;
if (hasApplicationId()) {
if (!getApplicationId()
.equals(other.getApplicationId())) return false;
}
if (hasHomeSubCluster() != other.hasHomeSubCluster()) return false;
if (hasHomeSubCluster()) {
if (!getHomeSubCluster()
.equals(other.getHomeSubCluster())) return false;
}
if (hasCreateTime() != other.hasCreateTime()) return false;
if (hasCreateTime()) {
if (getCreateTime()
!= other.getCreateTime()) return false;
}
if (hasAppSubmitContext() != other.hasAppSubmitContext()) return false;
if (hasAppSubmitContext()) {
if (!getAppSubmitContext()
.equals(other.getAppSubmitContext())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasApplicationId()) {
hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getApplicationId().hashCode();
}
if (hasHomeSubCluster()) {
hash = (37 * hash) + HOME_SUB_CLUSTER_FIELD_NUMBER;
hash = (53 * hash) + getHomeSubCluster().hashCode();
}
if (hasCreateTime()) {
hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getCreateTime());
}
if (hasAppSubmitContext()) {
hash = (37 * hash) + APP_SUBMIT_CONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getAppSubmitContext().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ApplicationHomeSubClusterProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationHomeSubClusterProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ApplicationHomeSubClusterProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ApplicationHomeSubClusterProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getApplicationIdFieldBuilder();
getHomeSubClusterFieldBuilder();
getAppSubmitContextFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
createTime_ = 0L;
appSubmitContext_ = null;
if (appSubmitContextBuilder_ != null) {
appSubmitContextBuilder_.dispose();
appSubmitContextBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ApplicationHomeSubClusterProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.applicationId_ = applicationIdBuilder_ == null
? applicationId_
: applicationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.homeSubCluster_ = homeSubClusterBuilder_ == null
? homeSubCluster_
: homeSubClusterBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.createTime_ = createTime_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.appSubmitContext_ = appSubmitContextBuilder_ == null
? appSubmitContext_
: appSubmitContextBuilder_.build();
to_bitField0_ |= 0x00000008;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance()) return this;
if (other.hasApplicationId()) {
mergeApplicationId(other.getApplicationId());
}
if (other.hasHomeSubCluster()) {
mergeHomeSubCluster(other.getHomeSubCluster());
}
if (other.hasCreateTime()) {
setCreateTime(other.getCreateTime());
}
if (other.hasAppSubmitContext()) {
mergeAppSubmitContext(other.getAppSubmitContext());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (hasAppSubmitContext()) {
if (!getAppSubmitContext().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getApplicationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getHomeSubClusterFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
createTime_ = input.readInt64();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34: {
input.readMessage(
getAppSubmitContextFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000008;
break;
} // case 34
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
if (applicationIdBuilder_ == null) {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
} else {
return applicationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
applicationId_ = value;
} else {
applicationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationIdBuilder_ == null) {
applicationId_ = builderForValue.build();
} else {
applicationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
applicationId_ != null &&
applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getApplicationIdBuilder().mergeFrom(value);
} else {
applicationId_ = value;
}
} else {
applicationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder clearApplicationId() {
bitField0_ = (bitField0_ & ~0x00000001);
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getApplicationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
if (applicationIdBuilder_ != null) {
return applicationIdBuilder_.getMessageOrBuilder();
} else {
return applicationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationIdFieldBuilder() {
if (applicationIdBuilder_ == null) {
applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getApplicationId(),
getParentForChildren(),
isClean());
applicationId_ = null;
}
return applicationIdBuilder_;
}
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> homeSubClusterBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return Whether the homeSubCluster field is set.
*/
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return The homeSubCluster.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
if (homeSubClusterBuilder_ == null) {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
} else {
return homeSubClusterBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder setHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
homeSubCluster_ = value;
} else {
homeSubClusterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder setHomeSubCluster(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (homeSubClusterBuilder_ == null) {
homeSubCluster_ = builderForValue.build();
} else {
homeSubClusterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder mergeHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
homeSubCluster_ != null &&
homeSubCluster_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getHomeSubClusterBuilder().mergeFrom(value);
} else {
homeSubCluster_ = value;
}
} else {
homeSubClusterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder clearHomeSubCluster() {
bitField0_ = (bitField0_ & ~0x00000002);
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getHomeSubClusterBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getHomeSubClusterFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
if (homeSubClusterBuilder_ != null) {
return homeSubClusterBuilder_.getMessageOrBuilder();
} else {
return homeSubCluster_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getHomeSubClusterFieldBuilder() {
if (homeSubClusterBuilder_ == null) {
homeSubClusterBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getHomeSubCluster(),
getParentForChildren(),
isClean());
homeSubCluster_ = null;
}
return homeSubClusterBuilder_;
}
private long createTime_ ;
/**
* optional int64 create_time = 3;
* @return Whether the createTime field is set.
*/
@java.lang.Override
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional int64 create_time = 3;
* @return The createTime.
*/
@java.lang.Override
public long getCreateTime() {
return createTime_;
}
/**
* optional int64 create_time = 3;
* @param value The createTime to set.
* @return This builder for chaining.
*/
public Builder setCreateTime(long value) {
createTime_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional int64 create_time = 3;
* @return This builder for chaining.
*/
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000004);
createTime_ = 0L;
onChanged();
return this;
}
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto appSubmitContext_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder> appSubmitContextBuilder_;
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
* @return Whether the appSubmitContext field is set.
*/
public boolean hasAppSubmitContext() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
* @return The appSubmitContext.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getAppSubmitContext() {
if (appSubmitContextBuilder_ == null) {
return appSubmitContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : appSubmitContext_;
} else {
return appSubmitContextBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
public Builder setAppSubmitContext(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto value) {
if (appSubmitContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appSubmitContext_ = value;
} else {
appSubmitContextBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
public Builder setAppSubmitContext(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder builderForValue) {
if (appSubmitContextBuilder_ == null) {
appSubmitContext_ = builderForValue.build();
} else {
appSubmitContextBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
public Builder mergeAppSubmitContext(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto value) {
if (appSubmitContextBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0) &&
appSubmitContext_ != null &&
appSubmitContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance()) {
getAppSubmitContextBuilder().mergeFrom(value);
} else {
appSubmitContext_ = value;
}
} else {
appSubmitContextBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
public Builder clearAppSubmitContext() {
bitField0_ = (bitField0_ & ~0x00000008);
appSubmitContext_ = null;
if (appSubmitContextBuilder_ != null) {
appSubmitContextBuilder_.dispose();
appSubmitContextBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder getAppSubmitContextBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getAppSubmitContextFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getAppSubmitContextOrBuilder() {
if (appSubmitContextBuilder_ != null) {
return appSubmitContextBuilder_.getMessageOrBuilder();
} else {
return appSubmitContext_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : appSubmitContext_;
}
}
/**
* optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder>
getAppSubmitContextFieldBuilder() {
if (appSubmitContextBuilder_ == null) {
appSubmitContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder>(
getAppSubmitContext(),
getParentForChildren(),
isClean());
appSubmitContext_ = null;
}
return appSubmitContextBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationHomeSubClusterProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationHomeSubClusterProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationHomeSubClusterProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationHomeSubClusterProto>() {
@java.lang.Override
public ApplicationHomeSubClusterProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationHomeSubClusterProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationHomeSubClusterProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
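// --- Illustrative sketch (editor-added, not part of the generated protobuf API) ---
// Shows how a caller might assemble and round-trip an ApplicationHomeSubClusterProto
// using only the builder methods visible above (setHomeSubCluster, setCreateTime,
// setAppSubmitContext) together with the standard toByteArray()/parseFrom(byte[]) pair.
// The helper name "exampleApplicationHomeSubClusterRoundTrip" is a hypothetical addition
// for documentation only; it assumes the supplied submitContext already carries any
// required sub-fields, otherwise build() or parseFrom() would reject the message.
private static ApplicationHomeSubClusterProto exampleApplicationHomeSubClusterRoundTrip(
    SubClusterIdProto homeSubCluster,
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto submitContext)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  ApplicationHomeSubClusterProto original = ApplicationHomeSubClusterProto.newBuilder()
      .setHomeSubCluster(homeSubCluster)          // optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2
      .setCreateTime(System.currentTimeMillis())  // optional int64 create_time = 3
      .setAppSubmitContext(submitContext)         // optional .hadoop.yarn.ApplicationSubmissionContextProto app_submit_context = 4
      .build();
  // Serialize and parse back to confirm the message survives a wire round trip.
  byte[] wire = original.toByteArray();
  return ApplicationHomeSubClusterProto.parseFrom(wire);
}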
public interface AddApplicationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AddApplicationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
boolean hasAppSubclusterMap();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.AddApplicationHomeSubClusterRequestProto}
*/
public static final class AddApplicationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AddApplicationHomeSubClusterRequestProto)
AddApplicationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddApplicationHomeSubClusterRequestProto.newBuilder() to construct.
private AddApplicationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddApplicationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddApplicationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appSubclusterMap_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
@java.lang.Override
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppSubclusterMap());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppSubclusterMap());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto) obj;
if (hasAppSubclusterMap() != other.hasAppSubclusterMap()) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap()
.equals(other.getAppSubclusterMap())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppSubclusterMap()) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMap().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.AddApplicationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AddApplicationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppSubclusterMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appSubclusterMap_ = appSubclusterMapBuilder_ == null
? appSubclusterMap_
: appSubclusterMapBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasAppSubclusterMap()) {
mergeAppSubclusterMap(other.getAppSubclusterMap());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppSubclusterMapFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appSubclusterMap_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
} else {
return appSubclusterMapBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appSubclusterMap_ = value;
} else {
appSubclusterMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = builderForValue.build();
} else {
appSubclusterMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder mergeAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appSubclusterMap_ != null &&
appSubclusterMap_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance()) {
getAppSubclusterMapBuilder().mergeFrom(value);
} else {
appSubclusterMap_ = value;
}
} else {
appSubclusterMapBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder getAppSubclusterMapBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppSubclusterMapFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilder();
} else {
return appSubclusterMap_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>(
getAppSubclusterMap(),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AddApplicationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AddApplicationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddApplicationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddApplicationHomeSubClusterRequestProto>() {
@java.lang.Override
public AddApplicationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddApplicationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddApplicationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
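// --- Illustrative sketch (editor-added, not part of the generated protobuf API) ---
// Demonstrates wrapping an ApplicationHomeSubClusterProto mapping into an
// AddApplicationHomeSubClusterRequestProto via setAppSubclusterMap(...) and reading it
// back through the has-accessor, which mirrors the bitField0_ presence tracking above.
// The helper name "exampleBuildAddRequest" is a hypothetical addition for documentation
// only; it assumes the supplied mapping is already initialized, since build() delegates
// to getAppSubclusterMap().isInitialized().
private static AddApplicationHomeSubClusterRequestProto exampleBuildAddRequest(
    ApplicationHomeSubClusterProto mapping) {
  AddApplicationHomeSubClusterRequestProto request =
      AddApplicationHomeSubClusterRequestProto.newBuilder()
          .setAppSubclusterMap(mapping)  // optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1
          .build();
  // hasAppSubclusterMap() now returns true because the optional field was explicitly set.
  return request;
}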
public interface AddApplicationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AddApplicationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return Whether the homeSubCluster field is set.
*/
boolean hasHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return The homeSubCluster.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.AddApplicationHomeSubClusterResponseProto}
*/
public static final class AddApplicationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AddApplicationHomeSubClusterResponseProto)
AddApplicationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddApplicationHomeSubClusterResponseProto.newBuilder() to construct.
private AddApplicationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddApplicationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddApplicationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto.Builder.class);
}
private int bitField0_;
public static final int HOME_SUB_CLUSTER_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return Whether the homeSubCluster field is set.
*/
@java.lang.Override
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return The homeSubCluster.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getHomeSubCluster());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getHomeSubCluster());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto) obj;
if (hasHomeSubCluster() != other.hasHomeSubCluster()) return false;
if (hasHomeSubCluster()) {
if (!getHomeSubCluster()
.equals(other.getHomeSubCluster())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasHomeSubCluster()) {
hash = (37 * hash) + HOME_SUB_CLUSTER_FIELD_NUMBER;
hash = (53 * hash) + getHomeSubCluster().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.AddApplicationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AddApplicationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getHomeSubClusterFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.homeSubCluster_ = homeSubClusterBuilder_ == null
? homeSubCluster_
: homeSubClusterBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto.getDefaultInstance()) return this;
if (other.hasHomeSubCluster()) {
mergeHomeSubCluster(other.getHomeSubCluster());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getHomeSubClusterFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> homeSubClusterBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return Whether the homeSubCluster field is set.
*/
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return The homeSubCluster.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
if (homeSubClusterBuilder_ == null) {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
} else {
return homeSubClusterBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder setHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
homeSubCluster_ = value;
} else {
homeSubClusterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder setHomeSubCluster(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (homeSubClusterBuilder_ == null) {
homeSubCluster_ = builderForValue.build();
} else {
homeSubClusterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder mergeHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
homeSubCluster_ != null &&
homeSubCluster_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getHomeSubClusterBuilder().mergeFrom(value);
} else {
homeSubCluster_ = value;
}
} else {
homeSubClusterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder clearHomeSubCluster() {
bitField0_ = (bitField0_ & ~0x00000001);
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getHomeSubClusterBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getHomeSubClusterFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
if (homeSubClusterBuilder_ != null) {
return homeSubClusterBuilder_.getMessageOrBuilder();
} else {
return homeSubCluster_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getHomeSubClusterFieldBuilder() {
if (homeSubClusterBuilder_ == null) {
homeSubClusterBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getHomeSubCluster(),
getParentForChildren(),
isClean());
homeSubCluster_ = null;
}
return homeSubClusterBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AddApplicationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AddApplicationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddApplicationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddApplicationHomeSubClusterResponseProto>() {
@java.lang.Override
public AddApplicationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddApplicationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddApplicationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
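// --- Illustrative sketch (editor-added, not part of the generated protobuf API) ---
// Shows the defensive read pattern for the optional home_sub_cluster field of
// AddApplicationHomeSubClusterResponseProto: check the has-accessor before using the
// value, because getHomeSubCluster() returns the default instance when the field was
// absent on the wire. The helper name "exampleReadHomeSubCluster" is a hypothetical
// addition for documentation only.
private static SubClusterIdProto exampleReadHomeSubCluster(byte[] wireBytes)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  AddApplicationHomeSubClusterResponseProto response =
      AddApplicationHomeSubClusterResponseProto.parseFrom(wireBytes);
  if (response.hasHomeSubCluster()) {
    return response.getHomeSubCluster();
  }
  // Field was not present; fall back to the default SubClusterIdProto instance.
  return SubClusterIdProto.getDefaultInstance();
}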
public interface GetApplicationClusterDataRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationClusterDataRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
boolean hasApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationClusterDataRequestProto}
*/
public static final class GetApplicationClusterDataRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationClusterDataRequestProto)
GetApplicationClusterDataRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetApplicationClusterDataRequestProto.newBuilder() to construct.
private GetApplicationClusterDataRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetApplicationClusterDataRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetApplicationClusterDataRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto.Builder.class);
}
private int bitField0_;
public static final int APPLICATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
@java.lang.Override
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getApplicationId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getApplicationId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto) obj;
if (hasApplicationId() != other.hasApplicationId()) return false;
if (hasApplicationId()) {
if (!getApplicationId()
.equals(other.getApplicationId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasApplicationId()) {
hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getApplicationId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationClusterDataRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationClusterDataRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getApplicationIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.applicationId_ = applicationIdBuilder_ == null
? applicationId_
: applicationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto.getDefaultInstance()) return this;
if (other.hasApplicationId()) {
mergeApplicationId(other.getApplicationId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getApplicationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
if (applicationIdBuilder_ == null) {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
} else {
return applicationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
applicationId_ = value;
} else {
applicationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationIdBuilder_ == null) {
applicationId_ = builderForValue.build();
} else {
applicationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
applicationId_ != null &&
applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getApplicationIdBuilder().mergeFrom(value);
} else {
applicationId_ = value;
}
} else {
applicationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder clearApplicationId() {
bitField0_ = (bitField0_ & ~0x00000001);
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getApplicationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
if (applicationIdBuilder_ != null) {
return applicationIdBuilder_.getMessageOrBuilder();
} else {
return applicationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationIdFieldBuilder() {
if (applicationIdBuilder_ == null) {
applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getApplicationId(),
getParentForChildren(),
isClean());
applicationId_ = null;
}
return applicationIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationClusterDataRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationClusterDataRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationClusterDataRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationClusterDataRequestProto>() {
@java.lang.Override
public GetApplicationClusterDataRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationClusterDataRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationClusterDataRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
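// ---------------------------------------------------------------------------
// Usage sketch (hand-written; not emitted by protoc): populates the optional
// application_id field of GetApplicationClusterDataRequestProto through its
// generated Builder. The helper name below is illustrative only.
// ---------------------------------------------------------------------------
private static GetApplicationClusterDataRequestProto exampleBuildGetApplicationClusterDataRequest(
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId) {
  return GetApplicationClusterDataRequestProto.newBuilder()
      // setApplicationId(...) copies the value and flips the hasApplicationId() presence bit.
      .setApplicationId(applicationId)
      .build();
}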
public interface GetApplicationClusterDataResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationClusterDataResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
* @return Whether the appHomeSubcluster field is set.
*/
boolean hasAppHomeSubcluster();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
* @return The appHomeSubcluster.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppHomeSubcluster();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppHomeSubclusterOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationClusterDataResponseProto}
*/
public static final class GetApplicationClusterDataResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationClusterDataResponseProto)
GetApplicationClusterDataResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetApplicationClusterDataResponseProto.newBuilder() to construct.
private GetApplicationClusterDataResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetApplicationClusterDataResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetApplicationClusterDataResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto.Builder.class);
}
private int bitField0_;
public static final int APP_HOME_SUBCLUSTER_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appHomeSubcluster_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
* @return Whether the appHomeSubcluster field is set.
*/
@java.lang.Override
public boolean hasAppHomeSubcluster() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
* @return The appHomeSubcluster.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppHomeSubcluster() {
return appHomeSubcluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appHomeSubcluster_;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppHomeSubclusterOrBuilder() {
return appHomeSubcluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appHomeSubcluster_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasAppHomeSubcluster()) {
if (!getAppHomeSubcluster().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppHomeSubcluster());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppHomeSubcluster());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto) obj;
if (hasAppHomeSubcluster() != other.hasAppHomeSubcluster()) return false;
if (hasAppHomeSubcluster()) {
if (!getAppHomeSubcluster()
.equals(other.getAppHomeSubcluster())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppHomeSubcluster()) {
hash = (37 * hash) + APP_HOME_SUBCLUSTER_FIELD_NUMBER;
hash = (53 * hash) + getAppHomeSubcluster().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationClusterDataResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationClusterDataResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppHomeSubclusterFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appHomeSubcluster_ = null;
if (appHomeSubclusterBuilder_ != null) {
appHomeSubclusterBuilder_.dispose();
appHomeSubclusterBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationClusterDataResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appHomeSubcluster_ = appHomeSubclusterBuilder_ == null
? appHomeSubcluster_
: appHomeSubclusterBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto.getDefaultInstance()) return this;
if (other.hasAppHomeSubcluster()) {
mergeAppHomeSubcluster(other.getAppHomeSubcluster());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (hasAppHomeSubcluster()) {
if (!getAppHomeSubcluster().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppHomeSubclusterFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appHomeSubcluster_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder> appHomeSubclusterBuilder_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
* @return Whether the appHomeSubcluster field is set.
*/
public boolean hasAppHomeSubcluster() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
* @return The appHomeSubcluster.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppHomeSubcluster() {
if (appHomeSubclusterBuilder_ == null) {
return appHomeSubcluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appHomeSubcluster_;
} else {
return appHomeSubclusterBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
public Builder setAppHomeSubcluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appHomeSubclusterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appHomeSubcluster_ = value;
} else {
appHomeSubclusterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
public Builder setAppHomeSubcluster(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder builderForValue) {
if (appHomeSubclusterBuilder_ == null) {
appHomeSubcluster_ = builderForValue.build();
} else {
appHomeSubclusterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
public Builder mergeAppHomeSubcluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appHomeSubclusterBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appHomeSubcluster_ != null &&
appHomeSubcluster_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance()) {
getAppHomeSubclusterBuilder().mergeFrom(value);
} else {
appHomeSubcluster_ = value;
}
} else {
appHomeSubclusterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
public Builder clearAppHomeSubcluster() {
bitField0_ = (bitField0_ & ~0x00000001);
appHomeSubcluster_ = null;
if (appHomeSubclusterBuilder_ != null) {
appHomeSubclusterBuilder_.dispose();
appHomeSubclusterBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder getAppHomeSubclusterBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppHomeSubclusterFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppHomeSubclusterOrBuilder() {
if (appHomeSubclusterBuilder_ != null) {
return appHomeSubclusterBuilder_.getMessageOrBuilder();
} else {
return appHomeSubcluster_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appHomeSubcluster_;
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_home_subcluster = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppHomeSubclusterFieldBuilder() {
if (appHomeSubclusterBuilder_ == null) {
appHomeSubclusterBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>(
getAppHomeSubcluster(),
getParentForChildren(),
isClean());
appHomeSubcluster_ = null;
}
return appHomeSubclusterBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationClusterDataResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationClusterDataResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationClusterDataResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationClusterDataResponseProto>() {
@java.lang.Override
public GetApplicationClusterDataResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationClusterDataResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationClusterDataResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationClusterDataResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
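// ---------------------------------------------------------------------------
// Usage sketch (hand-written; not emitted by protoc): reads the optional
// app_home_subcluster field of GetApplicationClusterDataResponseProto,
// checking the presence bit before falling back to the default instance.
// The helper name below is illustrative only.
// ---------------------------------------------------------------------------
private static ApplicationHomeSubClusterProto exampleReadAppHomeSubcluster(
    GetApplicationClusterDataResponseProto response) {
  // hasAppHomeSubcluster() reflects the presence bit tracked in bitField0_.
  if (response.hasAppHomeSubcluster()) {
    return response.getAppHomeSubcluster();
  }
  // getAppHomeSubcluster() already returns the default instance when unset;
  // the explicit branch just makes the fallback visible.
  return ApplicationHomeSubClusterProto.getDefaultInstance();
}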
public interface UpdateApplicationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
boolean hasAppSubclusterMap();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.UpdateApplicationHomeSubClusterRequestProto}
*/
public static final class UpdateApplicationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationHomeSubClusterRequestProto)
UpdateApplicationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateApplicationHomeSubClusterRequestProto.newBuilder() to construct.
private UpdateApplicationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateApplicationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UpdateApplicationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appSubclusterMap_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
@java.lang.Override
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppSubclusterMap());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppSubclusterMap());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto) obj;
if (hasAppSubclusterMap() != other.hasAppSubclusterMap()) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap()
.equals(other.getAppSubclusterMap())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppSubclusterMap()) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMap().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UpdateApplicationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppSubclusterMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appSubclusterMap_ = appSubclusterMapBuilder_ == null
? appSubclusterMap_
: appSubclusterMapBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasAppSubclusterMap()) {
mergeAppSubclusterMap(other.getAppSubclusterMap());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppSubclusterMapFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appSubclusterMap_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
} else {
return appSubclusterMapBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appSubclusterMap_ = value;
} else {
appSubclusterMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = builderForValue.build();
} else {
appSubclusterMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder mergeAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appSubclusterMap_ != null &&
appSubclusterMap_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance()) {
getAppSubclusterMapBuilder().mergeFrom(value);
} else {
appSubclusterMap_ = value;
}
} else {
appSubclusterMapBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder getAppSubclusterMapBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppSubclusterMapFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilder();
} else {
return appSubclusterMap_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>(
getAppSubclusterMap(),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateApplicationHomeSubClusterRequestProto>() {
@java.lang.Override
public UpdateApplicationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
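// Illustrative usage sketch only; this helper is not emitted by protoc and is not part of the
// generated API. It shows one plausible way a caller would build and round-trip an
// UpdateApplicationHomeSubClusterRequestProto using the generated builder and parser above.
// The ApplicationHomeSubClusterProto argument is assumed to have been constructed elsewhere.
private static UpdateApplicationHomeSubClusterRequestProto exampleBuildUpdateRequest(
    ApplicationHomeSubClusterProto homeMapping)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  // Build the request around the application -> home sub-cluster mapping.
  UpdateApplicationHomeSubClusterRequestProto request =
      UpdateApplicationHomeSubClusterRequestProto.newBuilder()
          .setAppSubclusterMap(homeMapping)
          .build();
  // Serialize and parse back, exercising the generated PARSER.
  byte[] wireBytes = request.toByteArray();
  return UpdateApplicationHomeSubClusterRequestProto.parseFrom(wireBytes);
}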
public interface UpdateApplicationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.UpdateApplicationHomeSubClusterResponseProto}
*/
public static final class UpdateApplicationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationHomeSubClusterResponseProto)
UpdateApplicationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateApplicationHomeSubClusterResponseProto.newBuilder() to construct.
private UpdateApplicationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateApplicationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UpdateApplicationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UpdateApplicationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateApplicationHomeSubClusterResponseProto>() {
@java.lang.Override
public UpdateApplicationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
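// Illustrative usage sketch only; not emitted by protoc. UpdateApplicationHomeSubClusterResponseProto
// declares no fields, so a caller typically just hands back the default instance and the peer
// parses it from the (empty) wire payload.
private static UpdateApplicationHomeSubClusterResponseProto exampleUpdateResponseRoundTrip()
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  UpdateApplicationHomeSubClusterResponseProto response =
      UpdateApplicationHomeSubClusterResponseProto.getDefaultInstance();
  // With no fields and no unknown fields, the serialized form is zero bytes.
  return UpdateApplicationHomeSubClusterResponseProto.parseFrom(response.toByteArray());
}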
public interface GetApplicationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
boolean hasApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @return Whether the containsAppSubmissionContext field is set.
*/
boolean hasContainsAppSubmissionContext();
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @return The containsAppSubmissionContext.
*/
boolean getContainsAppSubmissionContext();
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationHomeSubClusterRequestProto}
*/
public static final class GetApplicationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationHomeSubClusterRequestProto)
GetApplicationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetApplicationHomeSubClusterRequestProto.newBuilder() to construct.
private GetApplicationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetApplicationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetApplicationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int APPLICATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
@java.lang.Override
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
public static final int CONTAINS_APP_SUBMISSION_CONTEXT_FIELD_NUMBER = 2;
private boolean containsAppSubmissionContext_ = false;
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @return Whether the containsAppSubmissionContext field is set.
*/
@java.lang.Override
public boolean hasContainsAppSubmissionContext() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @return The containsAppSubmissionContext.
*/
@java.lang.Override
public boolean getContainsAppSubmissionContext() {
return containsAppSubmissionContext_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getApplicationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeBool(2, containsAppSubmissionContext_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getApplicationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(2, containsAppSubmissionContext_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto) obj;
if (hasApplicationId() != other.hasApplicationId()) return false;
if (hasApplicationId()) {
if (!getApplicationId()
.equals(other.getApplicationId())) return false;
}
if (hasContainsAppSubmissionContext() != other.hasContainsAppSubmissionContext()) return false;
if (hasContainsAppSubmissionContext()) {
if (getContainsAppSubmissionContext()
!= other.getContainsAppSubmissionContext()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasApplicationId()) {
hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getApplicationId().hashCode();
}
if (hasContainsAppSubmissionContext()) {
hash = (37 * hash) + CONTAINS_APP_SUBMISSION_CONTEXT_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getContainsAppSubmissionContext());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getApplicationIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
containsAppSubmissionContext_ = false;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.applicationId_ = applicationIdBuilder_ == null
? applicationId_
: applicationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.containsAppSubmissionContext_ = containsAppSubmissionContext_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasApplicationId()) {
mergeApplicationId(other.getApplicationId());
}
if (other.hasContainsAppSubmissionContext()) {
setContainsAppSubmissionContext(other.getContainsAppSubmissionContext());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getApplicationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
containsAppSubmissionContext_ = input.readBool();
bitField0_ |= 0x00000002;
break;
} // case 16
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
if (applicationIdBuilder_ == null) {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
} else {
return applicationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
applicationId_ = value;
} else {
applicationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationIdBuilder_ == null) {
applicationId_ = builderForValue.build();
} else {
applicationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
applicationId_ != null &&
applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getApplicationIdBuilder().mergeFrom(value);
} else {
applicationId_ = value;
}
} else {
applicationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder clearApplicationId() {
bitField0_ = (bitField0_ & ~0x00000001);
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getApplicationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
if (applicationIdBuilder_ != null) {
return applicationIdBuilder_.getMessageOrBuilder();
} else {
return applicationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationIdFieldBuilder() {
if (applicationIdBuilder_ == null) {
applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getApplicationId(),
getParentForChildren(),
isClean());
applicationId_ = null;
}
return applicationIdBuilder_;
}
private boolean containsAppSubmissionContext_ ;
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @return Whether the containsAppSubmissionContext field is set.
*/
@java.lang.Override
public boolean hasContainsAppSubmissionContext() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @return The containsAppSubmissionContext.
*/
@java.lang.Override
public boolean getContainsAppSubmissionContext() {
return containsAppSubmissionContext_;
}
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @param value The containsAppSubmissionContext to set.
* @return This builder for chaining.
*/
public Builder setContainsAppSubmissionContext(boolean value) {
containsAppSubmissionContext_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional bool contains_app_submission_context = 2 [default = false];
* @return This builder for chaining.
*/
public Builder clearContainsAppSubmissionContext() {
bitField0_ = (bitField0_ & ~0x00000002);
containsAppSubmissionContext_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationHomeSubClusterRequestProto>() {
@java.lang.Override
public GetApplicationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
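// Illustrative helper, not part of the protoc-generated output: a minimal sketch of how a
// caller typically assembles a GetApplicationHomeSubClusterRequestProto with the builder
// accessors defined above. The contains_app_submission_context flag already defaults to
// false; it is set explicitly here only so that the field shows up in the example.
private static GetApplicationHomeSubClusterRequestProto exampleHomeSubClusterRequestFor(
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId) {
  return GetApplicationHomeSubClusterRequestProto.newBuilder()
      .setApplicationId(applicationId)
      .setContainsAppSubmissionContext(false)
      .build();
}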
public interface GetApplicationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
boolean hasAppSubclusterMap();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap();
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationHomeSubClusterResponseProto}
*/
public static final class GetApplicationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationHomeSubClusterResponseProto)
GetApplicationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetApplicationHomeSubClusterResponseProto.newBuilder() to construct.
private GetApplicationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetApplicationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetApplicationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto.Builder.class);
}
private int bitField0_;
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appSubclusterMap_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
@java.lang.Override
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppSubclusterMap());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppSubclusterMap());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto) obj;
if (hasAppSubclusterMap() != other.hasAppSubclusterMap()) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap()
.equals(other.getAppSubclusterMap())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppSubclusterMap()) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMap().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppSubclusterMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appSubclusterMap_ = appSubclusterMapBuilder_ == null
? appSubclusterMap_
: appSubclusterMapBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto.getDefaultInstance()) return this;
if (other.hasAppSubclusterMap()) {
mergeAppSubclusterMap(other.getAppSubclusterMap());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppSubclusterMapFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto appSubclusterMap_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
} else {
return appSubclusterMapBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appSubclusterMap_ = value;
} else {
appSubclusterMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = builderForValue.build();
} else {
appSubclusterMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder mergeAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appSubclusterMap_ != null &&
appSubclusterMap_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance()) {
getAppSubclusterMapBuilder().mergeFrom(value);
} else {
appSubclusterMap_ = value;
}
} else {
appSubclusterMapBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder getAppSubclusterMapBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppSubclusterMapFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilder();
} else {
return appSubclusterMap_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
}
/**
* optional .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>(
getAppSubclusterMap(),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationHomeSubClusterResponseProto>() {
@java.lang.Override
public GetApplicationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
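// Illustrative helper, not part of the protoc-generated output: a minimal sketch of the usual
// read path for GetApplicationHomeSubClusterResponseProto. It parses the wire bytes, then
// guards the optional app_subcluster_map field with hasAppSubclusterMap() before reading it,
// returning null when the field was not set.
private static ApplicationHomeSubClusterProto exampleReadAppSubclusterMap(byte[] data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  GetApplicationHomeSubClusterResponseProto response =
      GetApplicationHomeSubClusterResponseProto.parseFrom(data);
  return response.hasAppSubclusterMap() ? response.getAppSubclusterMap() : null;
}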
public interface GetApplicationsHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationsHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
boolean hasSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId();
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationsHomeSubClusterRequestProto}
*/
public static final class GetApplicationsHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationsHomeSubClusterRequestProto)
GetApplicationsHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetApplicationsHomeSubClusterRequestProto.newBuilder() to construct.
private GetApplicationsHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetApplicationsHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetApplicationsHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int SUB_CLUSTER_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
@java.lang.Override
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSubClusterId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getSubClusterId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto) obj;
if (hasSubClusterId() != other.hasSubClusterId()) return false;
if (hasSubClusterId()) {
if (!getSubClusterId()
.equals(other.getSubClusterId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubClusterId()) {
hash = (37 * hash) + SUB_CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getSubClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationsHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationsHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSubClusterIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.subClusterId_ = subClusterIdBuilder_ == null
? subClusterId_
: subClusterIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasSubClusterId()) {
mergeSubClusterId(other.getSubClusterId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getSubClusterIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto subClusterId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> subClusterIdBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return Whether the subClusterId field is set.
*/
public boolean hasSubClusterId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
* @return The subClusterId.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getSubClusterId() {
if (subClusterIdBuilder_ == null) {
return subClusterId_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
} else {
return subClusterIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subClusterId_ = value;
} else {
subClusterIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder setSubClusterId(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (subClusterIdBuilder_ == null) {
subClusterId_ = builderForValue.build();
} else {
subClusterIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder mergeSubClusterId(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (subClusterIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
subClusterId_ != null &&
subClusterId_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getSubClusterIdBuilder().mergeFrom(value);
} else {
subClusterId_ = value;
}
} else {
subClusterIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public Builder clearSubClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
subClusterId_ = null;
if (subClusterIdBuilder_ != null) {
subClusterIdBuilder_.dispose();
subClusterIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getSubClusterIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSubClusterIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getSubClusterIdOrBuilder() {
if (subClusterIdBuilder_ != null) {
return subClusterIdBuilder_.getMessageOrBuilder();
} else {
return subClusterId_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : subClusterId_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto sub_cluster_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getSubClusterIdFieldBuilder() {
if (subClusterIdBuilder_ == null) {
subClusterIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getSubClusterId(),
getParentForChildren(),
isClean());
subClusterId_ = null;
}
return subClusterIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationsHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationsHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationsHomeSubClusterRequestProto>() {
@java.lang.Override
public GetApplicationsHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
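// Illustrative helper, not part of the protoc-generated output: builds a
// GetApplicationsHomeSubClusterRequestProto whose optional sub_cluster_id field is set, the
// form callers use when they want the query scoped to a single sub-cluster; leaving the
// field unset presumably asks for mappings across all sub-clusters.
private static GetApplicationsHomeSubClusterRequestProto exampleApplicationsRequestFor(
    SubClusterIdProto subClusterId) {
  return GetApplicationsHomeSubClusterRequestProto.newBuilder()
      .setSubClusterId(subClusterId)
      .build();
}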
public interface GetApplicationsHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationsHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto>
getAppSubclusterMapList();
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap(int index);
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
int getAppSubclusterMapCount();
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapOrBuilderList();
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder(
int index);
}
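// Illustrative helper, not part of the protoc-generated output: a minimal sketch of consuming
// the repeated app_subcluster_map field through the accessors declared in the interface above.
// It simply counts the entries whose required sub-fields are present, using the standard
// isInitialized() check that every generated message provides.
private static int exampleCountInitializedMappings(
    GetApplicationsHomeSubClusterResponseProto response) {
  int initialized = 0;
  for (ApplicationHomeSubClusterProto mapping : response.getAppSubclusterMapList()) {
    if (mapping.isInitialized()) {
      initialized++;
    }
  }
  return initialized;
}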
/**
* Protobuf type {@code hadoop.yarn.GetApplicationsHomeSubClusterResponseProto}
*/
public static final class GetApplicationsHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationsHomeSubClusterResponseProto)
GetApplicationsHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetApplicationsHomeSubClusterResponseProto.newBuilder() to construct.
private GetApplicationsHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetApplicationsHomeSubClusterResponseProto() {
appSubclusterMap_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetApplicationsHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto.Builder.class);
}
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto> appSubclusterMap_;
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto> getAppSubclusterMapList() {
return appSubclusterMap_;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapOrBuilderList() {
return appSubclusterMap_;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public int getAppSubclusterMapCount() {
return appSubclusterMap_.size();
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap(int index) {
return appSubclusterMap_.get(index);
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder(
int index) {
return appSubclusterMap_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getAppSubclusterMapCount(); i++) {
if (!getAppSubclusterMap(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < appSubclusterMap_.size(); i++) {
output.writeMessage(1, appSubclusterMap_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < appSubclusterMap_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, appSubclusterMap_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto) obj;
if (!getAppSubclusterMapList()
.equals(other.getAppSubclusterMapList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAppSubclusterMapCount() > 0) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMapList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetApplicationsHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationsHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = java.util.Collections.emptyList();
} else {
appSubclusterMap_ = null;
appSubclusterMapBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetApplicationsHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto result) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
appSubclusterMap_ = java.util.Collections.unmodifiableList(appSubclusterMap_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.appSubclusterMap_ = appSubclusterMap_;
} else {
result.appSubclusterMap_ = appSubclusterMapBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto.getDefaultInstance()) return this;
if (appSubclusterMapBuilder_ == null) {
if (!other.appSubclusterMap_.isEmpty()) {
if (appSubclusterMap_.isEmpty()) {
appSubclusterMap_ = other.appSubclusterMap_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.addAll(other.appSubclusterMap_);
}
onChanged();
}
} else {
if (!other.appSubclusterMap_.isEmpty()) {
if (appSubclusterMapBuilder_.isEmpty()) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
appSubclusterMap_ = other.appSubclusterMap_;
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMapBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAppSubclusterMapFieldBuilder() : null;
} else {
appSubclusterMapBuilder_.addAllMessages(other.appSubclusterMap_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getAppSubclusterMapCount(); i++) {
if (!getAppSubclusterMap(i).isInitialized()) {
return false;
}
}
return true;
}
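// Wire-format note: tag 10 below is (field number 1 << 3) | wire type 2 (length-delimited),
// i.e. one serialized app_subcluster_map entry.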
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto m =
input.readMessage(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.PARSER,
extensionRegistry);
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(m);
} else {
appSubclusterMapBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto> appSubclusterMap_ =
java.util.Collections.emptyList();
private void ensureAppSubclusterMapIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
appSubclusterMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto>(appSubclusterMap_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto> getAppSubclusterMapList() {
if (appSubclusterMapBuilder_ == null) {
return java.util.Collections.unmodifiableList(appSubclusterMap_);
} else {
return appSubclusterMapBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public int getAppSubclusterMapCount() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_.size();
} else {
return appSubclusterMapBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto getAppSubclusterMap(int index) {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_.get(index);
} else {
return appSubclusterMapBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.set(index, value);
onChanged();
} else {
appSubclusterMapBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.set(index, builderForValue.build());
onChanged();
} else {
appSubclusterMapBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(value);
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(index, value);
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(builderForValue.build());
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(index, builderForValue.build());
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAllAppSubclusterMap(
java.lang.Iterable<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto> values) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, appSubclusterMap_);
onChanged();
} else {
appSubclusterMapBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
appSubclusterMapBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder removeAppSubclusterMap(int index) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.remove(index);
onChanged();
} else {
appSubclusterMapBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder getAppSubclusterMapBuilder(
int index) {
return getAppSubclusterMapFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder(
int index) {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_.get(index); } else {
return appSubclusterMapBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapOrBuilderList() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(appSubclusterMap_);
}
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder addAppSubclusterMapBuilder() {
return getAppSubclusterMapFieldBuilder().addBuilder(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder addAppSubclusterMapBuilder(
int index) {
return getAppSubclusterMapFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ApplicationHomeSubClusterProto app_subcluster_map = 1;
*/
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder>
getAppSubclusterMapBuilderList() {
return getAppSubclusterMapFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ApplicationHomeSubClusterProtoOrBuilder>(
appSubclusterMap_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationsHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationsHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetApplicationsHomeSubClusterResponseProto>() {
@java.lang.Override
public GetApplicationsHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetApplicationsHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetApplicationsHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
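// Illustrative sketch (not part of the generated file): a caller would typically build
// this response via the generated Builder and round-trip it through the byte[] overloads
// declared above. The default-instance element used here is only a placeholder.
//
//   GetApplicationsHomeSubClusterResponseProto response =
//       GetApplicationsHomeSubClusterResponseProto.newBuilder()
//           .addAppSubclusterMap(ApplicationHomeSubClusterProto.getDefaultInstance())
//           .build();
//   byte[] bytes = response.toByteArray();
//   GetApplicationsHomeSubClusterResponseProto parsed =
//       GetApplicationsHomeSubClusterResponseProto.parseFrom(bytes);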
public interface DeleteApplicationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeleteApplicationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
boolean hasApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.DeleteApplicationHomeSubClusterRequestProto}
*/
public static final class DeleteApplicationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeleteApplicationHomeSubClusterRequestProto)
DeleteApplicationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteApplicationHomeSubClusterRequestProto.newBuilder() to construct.
private DeleteApplicationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteApplicationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeleteApplicationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int APPLICATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
@java.lang.Override
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getApplicationId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getApplicationId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto) obj;
if (hasApplicationId() != other.hasApplicationId()) return false;
if (hasApplicationId()) {
if (!getApplicationId()
.equals(other.getApplicationId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasApplicationId()) {
hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getApplicationId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeleteApplicationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeleteApplicationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getApplicationIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.applicationId_ = applicationIdBuilder_ == null
? applicationId_
: applicationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasApplicationId()) {
mergeApplicationId(other.getApplicationId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getApplicationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
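// Bit 0x00000001 of bitField0_ mirrors the has-bit for the optional application_id field.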
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
if (applicationIdBuilder_ == null) {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
} else {
return applicationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
applicationId_ = value;
} else {
applicationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationIdBuilder_ == null) {
applicationId_ = builderForValue.build();
} else {
applicationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
applicationId_ != null &&
applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getApplicationIdBuilder().mergeFrom(value);
} else {
applicationId_ = value;
}
} else {
applicationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder clearApplicationId() {
bitField0_ = (bitField0_ & ~0x00000001);
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getApplicationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
if (applicationIdBuilder_ != null) {
return applicationIdBuilder_.getMessageOrBuilder();
} else {
return applicationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationIdFieldBuilder() {
if (applicationIdBuilder_ == null) {
applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getApplicationId(),
getParentForChildren(),
isClean());
applicationId_ = null;
}
return applicationIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeleteApplicationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeleteApplicationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeleteApplicationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeleteApplicationHomeSubClusterRequestProto>() {
@java.lang.Override
public DeleteApplicationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeleteApplicationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeleteApplicationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
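// Illustrative sketch (not part of the generated file): building a delete request for an
// application id already held as a YarnProtos.ApplicationIdProto (the default instance is
// used here only as a placeholder).
//
//   DeleteApplicationHomeSubClusterRequestProto request =
//       DeleteApplicationHomeSubClusterRequestProto.newBuilder()
//           .setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance())
//           .build();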
public interface DeleteApplicationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeleteApplicationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.DeleteApplicationHomeSubClusterResponseProto}
*/
public static final class DeleteApplicationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeleteApplicationHomeSubClusterResponseProto)
DeleteApplicationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteApplicationHomeSubClusterResponseProto.newBuilder() to construct.
private DeleteApplicationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteApplicationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeleteApplicationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeleteApplicationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeleteApplicationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteApplicationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeleteApplicationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeleteApplicationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeleteApplicationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeleteApplicationHomeSubClusterResponseProto>() {
@java.lang.Override
public DeleteApplicationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeleteApplicationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeleteApplicationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteApplicationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
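// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the generated code):
// DeleteApplicationHomeSubClusterResponseProto carries no fields, so callers
// typically reuse the shared default instance or round-trip it unchanged.
// A minimal sketch using the accessors generated above:
//
//   DeleteApplicationHomeSubClusterResponseProto response =
//       DeleteApplicationHomeSubClusterResponseProto.getDefaultInstance();
//   byte[] wire = response.toByteArray();              // empty payload
//   DeleteApplicationHomeSubClusterResponseProto parsed =
//       DeleteApplicationHomeSubClusterResponseProto.parseFrom(wire);
// ---------------------------------------------------------------------------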
public interface ReservationHomeSubClusterProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationHomeSubClusterProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
boolean hasReservationId();
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return Whether the homeSubCluster field is set.
*/
boolean hasHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return The homeSubCluster.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.ReservationHomeSubClusterProto}
*/
public static final class ReservationHomeSubClusterProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationHomeSubClusterProto)
ReservationHomeSubClusterProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReservationHomeSubClusterProto.newBuilder() to construct.
private ReservationHomeSubClusterProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ReservationHomeSubClusterProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ReservationHomeSubClusterProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ReservationHomeSubClusterProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ReservationHomeSubClusterProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder.class);
}
private int bitField0_;
public static final int RESERVATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
@java.lang.Override
public boolean hasReservationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
public static final int HOME_SUB_CLUSTER_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return Whether the homeSubCluster field is set.
*/
@java.lang.Override
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return The homeSubCluster.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getReservationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getHomeSubCluster());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getReservationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getHomeSubCluster());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto) obj;
if (hasReservationId() != other.hasReservationId()) return false;
if (hasReservationId()) {
if (!getReservationId()
.equals(other.getReservationId())) return false;
}
if (hasHomeSubCluster() != other.hasHomeSubCluster()) return false;
if (hasHomeSubCluster()) {
if (!getHomeSubCluster()
.equals(other.getHomeSubCluster())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasReservationId()) {
hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getReservationId().hashCode();
}
if (hasHomeSubCluster()) {
hash = (37 * hash) + HOME_SUB_CLUSTER_FIELD_NUMBER;
hash = (53 * hash) + getHomeSubCluster().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ReservationHomeSubClusterProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationHomeSubClusterProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ReservationHomeSubClusterProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ReservationHomeSubClusterProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getReservationIdFieldBuilder();
getHomeSubClusterFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
reservationId_ = null;
if (reservationIdBuilder_ != null) {
reservationIdBuilder_.dispose();
reservationIdBuilder_ = null;
}
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_ReservationHomeSubClusterProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.reservationId_ = reservationIdBuilder_ == null
? reservationId_
: reservationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.homeSubCluster_ = homeSubClusterBuilder_ == null
? homeSubCluster_
: homeSubClusterBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance()) return this;
if (other.hasReservationId()) {
mergeReservationId(other.getReservationId());
}
if (other.hasHomeSubCluster()) {
mergeHomeSubCluster(other.getHomeSubCluster());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getReservationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getHomeSubClusterFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
public boolean hasReservationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
if (reservationIdBuilder_ == null) {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
} else {
return reservationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
if (reservationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
reservationId_ = value;
} else {
reservationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder setReservationId(
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
if (reservationIdBuilder_ == null) {
reservationId_ = builderForValue.build();
} else {
reservationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
if (reservationIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
reservationId_ != null &&
reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
getReservationIdBuilder().mergeFrom(value);
} else {
reservationId_ = value;
}
} else {
reservationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder clearReservationId() {
bitField0_ = (bitField0_ & ~0x00000001);
reservationId_ = null;
if (reservationIdBuilder_ != null) {
reservationIdBuilder_.dispose();
reservationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getReservationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
if (reservationIdBuilder_ != null) {
return reservationIdBuilder_.getMessageOrBuilder();
} else {
return reservationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>
getReservationIdFieldBuilder() {
if (reservationIdBuilder_ == null) {
reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
getReservationId(),
getParentForChildren(),
isClean());
reservationId_ = null;
}
return reservationIdBuilder_;
}
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> homeSubClusterBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return Whether the homeSubCluster field is set.
*/
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
* @return The homeSubCluster.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
if (homeSubClusterBuilder_ == null) {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
} else {
return homeSubClusterBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder setHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
homeSubCluster_ = value;
} else {
homeSubClusterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder setHomeSubCluster(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (homeSubClusterBuilder_ == null) {
homeSubCluster_ = builderForValue.build();
} else {
homeSubClusterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder mergeHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
homeSubCluster_ != null &&
homeSubCluster_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getHomeSubClusterBuilder().mergeFrom(value);
} else {
homeSubCluster_ = value;
}
} else {
homeSubClusterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public Builder clearHomeSubCluster() {
bitField0_ = (bitField0_ & ~0x00000002);
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getHomeSubClusterBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getHomeSubClusterFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
if (homeSubClusterBuilder_ != null) {
return homeSubClusterBuilder_.getMessageOrBuilder();
} else {
return homeSubCluster_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 2;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getHomeSubClusterFieldBuilder() {
if (homeSubClusterBuilder_ == null) {
homeSubClusterBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getHomeSubCluster(),
getParentForChildren(),
isClean());
homeSubCluster_ = null;
}
return homeSubClusterBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationHomeSubClusterProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationHomeSubClusterProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationHomeSubClusterProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationHomeSubClusterProto>() {
@java.lang.Override
public ReservationHomeSubClusterProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationHomeSubClusterProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationHomeSubClusterProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
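// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the generated code):
// ReservationHomeSubClusterProto pairs a reservation with its home sub-cluster.
// The setters on the referenced protos (setId/setClusterTimestamp on
// ReservationIdProto, setId on SubClusterIdProto) are assumptions based on
// their usual definitions in the YARN .proto files.
//
//   org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId =
//       org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder()
//           .setClusterTimestamp(1700000000000L)
//           .setId(42L)
//           .build();
//   ReservationHomeSubClusterProto mapping = ReservationHomeSubClusterProto.newBuilder()
//       .setReservationId(reservationId)
//       .setHomeSubCluster(SubClusterIdProto.newBuilder().setId("SC-1").build())
//       .build();
//   byte[] wire = mapping.toByteArray();
//   ReservationHomeSubClusterProto parsed = ReservationHomeSubClusterProto.parseFrom(wire);
// ---------------------------------------------------------------------------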
public interface AddReservationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AddReservationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
boolean hasAppSubclusterMap();
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap();
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder();
}
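// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the generated code):
// AddReservationHomeSubClusterRequestProto wraps a single
// ReservationHomeSubClusterProto under the app_subcluster_map field. Assuming
// a "mapping" built as in the sketch after ReservationHomeSubClusterProto, a
// request could be assembled via the generated setter:
//
//   AddReservationHomeSubClusterRequestProto request =
//       AddReservationHomeSubClusterRequestProto.newBuilder()
//           .setAppSubclusterMap(mapping)
//           .build();
// ---------------------------------------------------------------------------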
/**
* Protobuf type {@code hadoop.yarn.AddReservationHomeSubClusterRequestProto}
*/
public static final class AddReservationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AddReservationHomeSubClusterRequestProto)
AddReservationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddReservationHomeSubClusterRequestProto.newBuilder() to construct.
private AddReservationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddReservationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddReservationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto appSubclusterMap_;
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
@java.lang.Override
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppSubclusterMap());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppSubclusterMap());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto) obj;
if (hasAppSubclusterMap() != other.hasAppSubclusterMap()) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap()
.equals(other.getAppSubclusterMap())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppSubclusterMap()) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMap().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.AddReservationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AddReservationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppSubclusterMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appSubclusterMap_ = appSubclusterMapBuilder_ == null
? appSubclusterMap_
: appSubclusterMapBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasAppSubclusterMap()) {
mergeAppSubclusterMap(other.getAppSubclusterMap());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppSubclusterMapFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto appSubclusterMap_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
} else {
return appSubclusterMapBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appSubclusterMap_ = value;
} else {
appSubclusterMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = builderForValue.build();
} else {
appSubclusterMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder mergeAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appSubclusterMap_ != null &&
appSubclusterMap_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance()) {
getAppSubclusterMapBuilder().mergeFrom(value);
} else {
appSubclusterMap_ = value;
}
} else {
appSubclusterMapBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder getAppSubclusterMapBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppSubclusterMapFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilder();
} else {
return appSubclusterMap_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>(
getAppSubclusterMap(),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AddReservationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AddReservationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddReservationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddReservationHomeSubClusterRequestProto>() {
@java.lang.Override
public AddReservationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddReservationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddReservationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
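// Editor's note: the snippet below is an illustrative usage sketch, not part of the
// generated source. It assumes the sibling ReservationHomeSubClusterProto message
// (defined elsewhere in this file) exposes the usual generated newBuilder()/build()
// API; any field names other than appSubclusterMap are hypothetical.
//
//   ReservationHomeSubClusterProto mapping = ReservationHomeSubClusterProto.newBuilder()
//       .build(); // populate the reservation-id / sub-cluster fields as needed
//   AddReservationHomeSubClusterRequestProto request =
//       AddReservationHomeSubClusterRequestProto.newBuilder()
//           .setAppSubclusterMap(mapping)
//           .build();
//   byte[] wire = request.toByteArray();                          // serialize
//   AddReservationHomeSubClusterRequestProto parsed =
//       AddReservationHomeSubClusterRequestProto.parseFrom(wire); // round-trip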
public interface AddReservationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AddReservationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return Whether the homeSubCluster field is set.
*/
boolean hasHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return The homeSubCluster.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster();
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.AddReservationHomeSubClusterResponseProto}
*/
public static final class AddReservationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AddReservationHomeSubClusterResponseProto)
AddReservationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddReservationHomeSubClusterResponseProto.newBuilder() to construct.
private AddReservationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddReservationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddReservationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto.Builder.class);
}
private int bitField0_;
public static final int HOME_SUB_CLUSTER_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return Whether the homeSubCluster field is set.
*/
@java.lang.Override
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return The homeSubCluster.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getHomeSubCluster());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getHomeSubCluster());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto) obj;
if (hasHomeSubCluster() != other.hasHomeSubCluster()) return false;
if (hasHomeSubCluster()) {
if (!getHomeSubCluster()
.equals(other.getHomeSubCluster())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasHomeSubCluster()) {
hash = (37 * hash) + HOME_SUB_CLUSTER_FIELD_NUMBER;
hash = (53 * hash) + getHomeSubCluster().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.AddReservationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AddReservationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getHomeSubClusterFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_AddReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.homeSubCluster_ = homeSubClusterBuilder_ == null
? homeSubCluster_
: homeSubClusterBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto.getDefaultInstance()) return this;
if (other.hasHomeSubCluster()) {
mergeHomeSubCluster(other.getHomeSubCluster());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getHomeSubClusterFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto homeSubCluster_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder> homeSubClusterBuilder_;
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return Whether the homeSubCluster field is set.
*/
public boolean hasHomeSubCluster() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
* @return The homeSubCluster.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto getHomeSubCluster() {
if (homeSubClusterBuilder_ == null) {
return homeSubCluster_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
} else {
return homeSubClusterBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder setHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
homeSubCluster_ = value;
} else {
homeSubClusterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder setHomeSubCluster(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder builderForValue) {
if (homeSubClusterBuilder_ == null) {
homeSubCluster_ = builderForValue.build();
} else {
homeSubClusterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder mergeHomeSubCluster(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto value) {
if (homeSubClusterBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
homeSubCluster_ != null &&
homeSubCluster_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance()) {
getHomeSubClusterBuilder().mergeFrom(value);
} else {
homeSubCluster_ = value;
}
} else {
homeSubClusterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public Builder clearHomeSubCluster() {
bitField0_ = (bitField0_ & ~0x00000001);
homeSubCluster_ = null;
if (homeSubClusterBuilder_ != null) {
homeSubClusterBuilder_.dispose();
homeSubClusterBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder getHomeSubClusterBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getHomeSubClusterFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder getHomeSubClusterOrBuilder() {
if (homeSubClusterBuilder_ != null) {
return homeSubClusterBuilder_.getMessageOrBuilder();
} else {
return homeSubCluster_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.getDefaultInstance() : homeSubCluster_;
}
}
/**
* optional .hadoop.yarn.SubClusterIdProto home_sub_cluster = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>
getHomeSubClusterFieldBuilder() {
if (homeSubClusterBuilder_ == null) {
homeSubClusterBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProtoOrBuilder>(
getHomeSubCluster(),
getParentForChildren(),
isClean());
homeSubCluster_ = null;
}
return homeSubClusterBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AddReservationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AddReservationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddReservationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddReservationHomeSubClusterResponseProto>() {
@java.lang.Override
public AddReservationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddReservationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddReservationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.AddReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
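// Editor's note: illustrative sketch only (not generated code). It shows how a caller
// might read the optional home_sub_cluster field from a parsed response; checking
// hasHomeSubCluster() first matters because an unset field falls back to the
// SubClusterIdProto default instance. The wireBytes variable is hypothetical.
//
//   AddReservationHomeSubClusterResponseProto response =
//       AddReservationHomeSubClusterResponseProto.parseFrom(wireBytes);
//   if (response.hasHomeSubCluster()) {
//     SubClusterIdProto home = response.getHomeSubCluster();
//     // use the home sub-cluster id ...
//   }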
public interface UpdateReservationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateReservationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
boolean hasAppSubclusterMap();
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap();
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.UpdateReservationHomeSubClusterRequestProto}
*/
public static final class UpdateReservationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateReservationHomeSubClusterRequestProto)
UpdateReservationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateReservationHomeSubClusterRequestProto.newBuilder() to construct.
private UpdateReservationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateReservationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UpdateReservationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto appSubclusterMap_;
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
@java.lang.Override
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppSubclusterMap());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppSubclusterMap());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto) obj;
if (hasAppSubclusterMap() != other.hasAppSubclusterMap()) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap()
.equals(other.getAppSubclusterMap())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppSubclusterMap()) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMap().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UpdateReservationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateReservationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppSubclusterMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appSubclusterMap_ = appSubclusterMapBuilder_ == null
? appSubclusterMap_
: appSubclusterMapBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasAppSubclusterMap()) {
mergeAppSubclusterMap(other.getAppSubclusterMap());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppSubclusterMapFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto appSubclusterMap_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
} else {
return appSubclusterMapBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appSubclusterMap_ = value;
} else {
appSubclusterMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = builderForValue.build();
} else {
appSubclusterMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder mergeAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appSubclusterMap_ != null &&
appSubclusterMap_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance()) {
getAppSubclusterMapBuilder().mergeFrom(value);
} else {
appSubclusterMap_ = value;
}
} else {
appSubclusterMapBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder getAppSubclusterMapBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppSubclusterMapFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilder();
} else {
return appSubclusterMap_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>(
getAppSubclusterMap(),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateReservationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateReservationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateReservationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateReservationHomeSubClusterRequestProto>() {
@java.lang.Override
public UpdateReservationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateReservationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateReservationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
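// Editor's sketch (not generated by protoc): a minimal round trip showing how a caller
// would typically build, serialize and re-parse the request above through the generated
// builder and parser API. The helper name and the use of
// ReservationHomeSubClusterProto.getDefaultInstance() as a placeholder payload are
// illustrative assumptions, not part of the original source.
@SuppressWarnings("unused")
private static UpdateReservationHomeSubClusterRequestProto
    exampleUpdateReservationHomeSubClusterRequestRoundTrip()
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  // Build a request carrying a (placeholder) reservation-to-home-sub-cluster mapping.
  UpdateReservationHomeSubClusterRequestProto request =
      UpdateReservationHomeSubClusterRequestProto.newBuilder()
          .setAppSubclusterMap(ReservationHomeSubClusterProto.getDefaultInstance())
          .build();
  // Serialize to the protobuf wire format and parse it back.
  byte[] wire = request.toByteArray();
  return UpdateReservationHomeSubClusterRequestProto.parseFrom(wire);
}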
public interface UpdateReservationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateReservationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.UpdateReservationHomeSubClusterResponseProto}
*/
public static final class UpdateReservationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateReservationHomeSubClusterResponseProto)
UpdateReservationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateReservationHomeSubClusterResponseProto.newBuilder() to construct.
private UpdateReservationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateReservationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UpdateReservationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UpdateReservationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateReservationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_UpdateReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateReservationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateReservationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdateReservationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdateReservationHomeSubClusterResponseProto>() {
@java.lang.Override
public UpdateReservationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdateReservationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateReservationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.UpdateReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
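// Editor's sketch (not generated by protoc): UpdateReservationHomeSubClusterResponseProto
// declares no fields, so an instance only carries unknown fields seen on the wire. The
// helper below is an illustrative assumption showing the empty-message round trip.
@SuppressWarnings("unused")
private static UpdateReservationHomeSubClusterResponseProto
    exampleUpdateReservationHomeSubClusterResponseRoundTrip()
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  // An empty response serializes to zero bytes (plus any retained unknown fields).
  UpdateReservationHomeSubClusterResponseProto response =
      UpdateReservationHomeSubClusterResponseProto.newBuilder().build();
  return UpdateReservationHomeSubClusterResponseProto.parseFrom(response.toByteArray());
}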
public interface GetReservationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetReservationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
boolean hasReservationId();
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationHomeSubClusterRequestProto}
*/
public static final class GetReservationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetReservationHomeSubClusterRequestProto)
GetReservationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetReservationHomeSubClusterRequestProto.newBuilder() to construct.
private GetReservationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetReservationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetReservationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int RESERVATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
@java.lang.Override
public boolean hasReservationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getReservationId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getReservationId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto) obj;
if (hasReservationId() != other.hasReservationId()) return false;
if (hasReservationId()) {
if (!getReservationId()
.equals(other.getReservationId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasReservationId()) {
hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getReservationId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetReservationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getReservationIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
reservationId_ = null;
if (reservationIdBuilder_ != null) {
reservationIdBuilder_.dispose();
reservationIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.reservationId_ = reservationIdBuilder_ == null
? reservationId_
: reservationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasReservationId()) {
mergeReservationId(other.getReservationId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getReservationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
public boolean hasReservationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
if (reservationIdBuilder_ == null) {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
} else {
return reservationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
if (reservationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
reservationId_ = value;
} else {
reservationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder setReservationId(
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
if (reservationIdBuilder_ == null) {
reservationId_ = builderForValue.build();
} else {
reservationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
if (reservationIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
reservationId_ != null &&
reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
getReservationIdBuilder().mergeFrom(value);
} else {
reservationId_ = value;
}
} else {
reservationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder clearReservationId() {
bitField0_ = (bitField0_ & ~0x00000001);
reservationId_ = null;
if (reservationIdBuilder_ != null) {
reservationIdBuilder_.dispose();
reservationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getReservationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
if (reservationIdBuilder_ != null) {
return reservationIdBuilder_.getMessageOrBuilder();
} else {
return reservationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>
getReservationIdFieldBuilder() {
if (reservationIdBuilder_ == null) {
reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
getReservationId(),
getParentForChildren(),
isClean());
reservationId_ = null;
}
return reservationIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetReservationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetReservationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetReservationHomeSubClusterRequestProto>() {
@java.lang.Override
public GetReservationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
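// Editor's sketch (not generated by protoc): how a caller would typically populate the
// optional reservation_id field of the request above. getDefaultInstance() is used purely
// as a placeholder value; a real caller would supply a fully populated ReservationIdProto.
// The helper name is an illustrative assumption.
@SuppressWarnings("unused")
private static GetReservationHomeSubClusterRequestProto
    exampleGetReservationHomeSubClusterRequest() {
  return GetReservationHomeSubClusterRequestProto.newBuilder()
      // hasReservationId() becomes true once the field is set on the builder.
      .setReservationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance())
      .build();
}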
public interface GetReservationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetReservationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
boolean hasAppSubclusterMap();
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap();
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationHomeSubClusterResponseProto}
*/
public static final class GetReservationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetReservationHomeSubClusterResponseProto)
GetReservationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetReservationHomeSubClusterResponseProto.newBuilder() to construct.
private GetReservationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetReservationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetReservationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto.Builder.class);
}
private int bitField0_;
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto appSubclusterMap_;
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
@java.lang.Override
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppSubclusterMap());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppSubclusterMap());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto) obj;
if (hasAppSubclusterMap() != other.hasAppSubclusterMap()) return false;
if (hasAppSubclusterMap()) {
if (!getAppSubclusterMap()
.equals(other.getAppSubclusterMap())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppSubclusterMap()) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMap().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetReservationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppSubclusterMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appSubclusterMap_ = appSubclusterMapBuilder_ == null
? appSubclusterMap_
: appSubclusterMapBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto.getDefaultInstance()) return this;
if (other.hasAppSubclusterMap()) {
mergeAppSubclusterMap(other.getAppSubclusterMap());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
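// Reads the wire format directly: tag 0 marks end of input, tag 10 (field 1,
// length-delimited) carries the ReservationHomeSubClusterProto, and any other tag is
// preserved as an unknown field.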
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppSubclusterMapFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto appSubclusterMap_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return Whether the appSubclusterMap field is set.
*/
public boolean hasAppSubclusterMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
* @return The appSubclusterMap.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
} else {
return appSubclusterMapBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appSubclusterMap_ = value;
} else {
appSubclusterMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = builderForValue.build();
} else {
appSubclusterMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder mergeAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appSubclusterMap_ != null &&
appSubclusterMap_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance()) {
getAppSubclusterMapBuilder().mergeFrom(value);
} else {
appSubclusterMap_ = value;
}
} else {
appSubclusterMapBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMap_ = null;
if (appSubclusterMapBuilder_ != null) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder getAppSubclusterMapBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppSubclusterMapFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilder();
} else {
return appSubclusterMap_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance() : appSubclusterMap_;
}
}
/**
* optional .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>(
getAppSubclusterMap(),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetReservationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetReservationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetReservationHomeSubClusterResponseProto>() {
@java.lang.Override
public GetReservationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
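// Illustrative usage sketch (not part of the generated code; "mapping" is a hypothetical
// ReservationHomeSubClusterProto instance built elsewhere):
//
//   GetReservationHomeSubClusterResponseProto resp =
//       GetReservationHomeSubClusterResponseProto.newBuilder()
//           .setAppSubclusterMap(mapping)
//           .build();
//   byte[] bytes = resp.toByteArray();
//   GetReservationHomeSubClusterResponseProto parsed =
//       GetReservationHomeSubClusterResponseProto.parseFrom(bytes);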
public interface GetReservationsHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetReservationsHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationsHomeSubClusterRequestProto}
*/
public static final class GetReservationsHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetReservationsHomeSubClusterRequestProto)
GetReservationsHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetReservationsHomeSubClusterRequestProto.newBuilder() to construct.
private GetReservationsHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetReservationsHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetReservationsHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationsHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetReservationsHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
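// The request message declares no fields, so parsing only stops at tag 0 and preserves
// any unknown fields it encounters.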
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetReservationsHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetReservationsHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationsHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetReservationsHomeSubClusterRequestProto>() {
@java.lang.Override
public GetReservationsHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationsHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationsHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface GetReservationsHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetReservationsHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto>
getAppSubclusterMapList();
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap(int index);
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
int getAppSubclusterMapCount();
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapOrBuilderList();
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationsHomeSubClusterResponseProto}
*/
public static final class GetReservationsHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetReservationsHomeSubClusterResponseProto)
GetReservationsHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetReservationsHomeSubClusterResponseProto.newBuilder() to construct.
private GetReservationsHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetReservationsHomeSubClusterResponseProto() {
appSubclusterMap_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetReservationsHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto.Builder.class);
}
public static final int APP_SUBCLUSTER_MAP_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto> appSubclusterMap_;
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto> getAppSubclusterMapList() {
return appSubclusterMap_;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapOrBuilderList() {
return appSubclusterMap_;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public int getAppSubclusterMapCount() {
return appSubclusterMap_.size();
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap(int index) {
return appSubclusterMap_.get(index);
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder(
int index) {
return appSubclusterMap_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < appSubclusterMap_.size(); i++) {
output.writeMessage(1, appSubclusterMap_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < appSubclusterMap_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, appSubclusterMap_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto) obj;
if (!getAppSubclusterMapList()
.equals(other.getAppSubclusterMapList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAppSubclusterMapCount() > 0) {
hash = (37 * hash) + APP_SUBCLUSTER_MAP_FIELD_NUMBER;
hash = (53 * hash) + getAppSubclusterMapList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetReservationsHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetReservationsHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = java.util.Collections.emptyList();
} else {
appSubclusterMap_ = null;
appSubclusterMapBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetReservationsHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
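// Moves the repeated app_subcluster_map field into the result: freezes the builder-owned
// list into an unmodifiable view, or lets the RepeatedFieldBuilderV3 build the message list.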
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto result) {
if (appSubclusterMapBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
appSubclusterMap_ = java.util.Collections.unmodifiableList(appSubclusterMap_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.appSubclusterMap_ = appSubclusterMap_;
} else {
result.appSubclusterMap_ = appSubclusterMapBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto.getDefaultInstance()) return this;
if (appSubclusterMapBuilder_ == null) {
if (!other.appSubclusterMap_.isEmpty()) {
if (appSubclusterMap_.isEmpty()) {
appSubclusterMap_ = other.appSubclusterMap_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.addAll(other.appSubclusterMap_);
}
onChanged();
}
} else {
if (!other.appSubclusterMap_.isEmpty()) {
if (appSubclusterMapBuilder_.isEmpty()) {
appSubclusterMapBuilder_.dispose();
appSubclusterMapBuilder_ = null;
appSubclusterMap_ = other.appSubclusterMap_;
bitField0_ = (bitField0_ & ~0x00000001);
appSubclusterMapBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAppSubclusterMapFieldBuilder() : null;
} else {
appSubclusterMapBuilder_.addAllMessages(other.appSubclusterMap_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto m =
input.readMessage(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.PARSER,
extensionRegistry);
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(m);
} else {
appSubclusterMapBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto> appSubclusterMap_ =
java.util.Collections.emptyList();
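// Copy-on-write guard: clones the (possibly shared) list before the first in-place
// mutation and marks ownership via bit 0 of bitField0_.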
private void ensureAppSubclusterMapIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
appSubclusterMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto>(appSubclusterMap_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder> appSubclusterMapBuilder_;
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto> getAppSubclusterMapList() {
if (appSubclusterMapBuilder_ == null) {
return java.util.Collections.unmodifiableList(appSubclusterMap_);
} else {
return appSubclusterMapBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public int getAppSubclusterMapCount() {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_.size();
} else {
return appSubclusterMapBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto getAppSubclusterMap(int index) {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_.get(index);
} else {
return appSubclusterMapBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.set(index, value);
onChanged();
} else {
appSubclusterMapBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder setAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.set(index, builderForValue.build());
onChanged();
} else {
appSubclusterMapBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(value);
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto value) {
if (appSubclusterMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(index, value);
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(builderForValue.build());
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAppSubclusterMap(
int index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder builderForValue) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.add(index, builderForValue.build());
onChanged();
} else {
appSubclusterMapBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder addAllAppSubclusterMap(
java.lang.Iterable<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto> values) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, appSubclusterMap_);
onChanged();
} else {
appSubclusterMapBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder clearAppSubclusterMap() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMap_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
appSubclusterMapBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public Builder removeAppSubclusterMap(int index) {
if (appSubclusterMapBuilder_ == null) {
ensureAppSubclusterMapIsMutable();
appSubclusterMap_.remove(index);
onChanged();
} else {
appSubclusterMapBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder getAppSubclusterMapBuilder(
int index) {
return getAppSubclusterMapFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder getAppSubclusterMapOrBuilder(
int index) {
if (appSubclusterMapBuilder_ == null) {
return appSubclusterMap_.get(index); } else {
return appSubclusterMapBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public java.util.List<? extends org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapOrBuilderList() {
if (appSubclusterMapBuilder_ != null) {
return appSubclusterMapBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(appSubclusterMap_);
}
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder addAppSubclusterMapBuilder() {
return getAppSubclusterMapFieldBuilder().addBuilder(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder addAppSubclusterMapBuilder(
int index) {
return getAppSubclusterMapFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ReservationHomeSubClusterProto app_subcluster_map = 1;
*/
public java.util.List<org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder>
getAppSubclusterMapBuilderList() {
return getAppSubclusterMapFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>
getAppSubclusterMapFieldBuilder() {
if (appSubclusterMapBuilder_ == null) {
appSubclusterMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.ReservationHomeSubClusterProtoOrBuilder>(
appSubclusterMap_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
appSubclusterMap_ = null;
}
return appSubclusterMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetReservationsHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetReservationsHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationsHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetReservationsHomeSubClusterResponseProto>() {
@java.lang.Override
public GetReservationsHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationsHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetReservationsHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetReservationsHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
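// Illustrative usage sketch (not part of the generated code; "mappings" is a hypothetical
// java.util.List<ReservationHomeSubClusterProto> built elsewhere):
//
//   GetReservationsHomeSubClusterResponseProto resp =
//       GetReservationsHomeSubClusterResponseProto.newBuilder()
//           .addAllAppSubclusterMap(mappings)
//           .build();
//   for (ReservationHomeSubClusterProto m : resp.getAppSubclusterMapList()) {
//     // process each reservation-to-subcluster mapping
//   }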
public interface DeleteReservationHomeSubClusterRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeleteReservationHomeSubClusterRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
boolean hasReservationId();
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.DeleteReservationHomeSubClusterRequestProto}
*/
public static final class DeleteReservationHomeSubClusterRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeleteReservationHomeSubClusterRequestProto)
DeleteReservationHomeSubClusterRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteReservationHomeSubClusterRequestProto.newBuilder() to construct.
private DeleteReservationHomeSubClusterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteReservationHomeSubClusterRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeleteReservationHomeSubClusterRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto.Builder.class);
}
private int bitField0_;
public static final int RESERVATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
@java.lang.Override
public boolean hasReservationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getReservationId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getReservationId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto) obj;
if (hasReservationId() != other.hasReservationId()) return false;
if (hasReservationId()) {
if (!getReservationId()
.equals(other.getReservationId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasReservationId()) {
hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getReservationId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeleteReservationHomeSubClusterRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeleteReservationHomeSubClusterRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getReservationIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
reservationId_ = null;
if (reservationIdBuilder_ != null) {
reservationIdBuilder_.dispose();
reservationIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.reservationId_ = reservationIdBuilder_ == null
? reservationId_
: reservationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto.getDefaultInstance()) return this;
if (other.hasReservationId()) {
mergeReservationId(other.getReservationId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getReservationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return Whether the reservationId field is set.
*/
public boolean hasReservationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
* @return The reservationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
if (reservationIdBuilder_ == null) {
return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
} else {
return reservationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
if (reservationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
reservationId_ = value;
} else {
reservationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder setReservationId(
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
if (reservationIdBuilder_ == null) {
reservationId_ = builderForValue.build();
} else {
reservationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
if (reservationIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
reservationId_ != null &&
reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
getReservationIdBuilder().mergeFrom(value);
} else {
reservationId_ = value;
}
} else {
reservationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public Builder clearReservationId() {
bitField0_ = (bitField0_ & ~0x00000001);
reservationId_ = null;
if (reservationIdBuilder_ != null) {
reservationIdBuilder_.dispose();
reservationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getReservationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
if (reservationIdBuilder_ != null) {
return reservationIdBuilder_.getMessageOrBuilder();
} else {
return reservationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
}
}
/**
* optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>
getReservationIdFieldBuilder() {
if (reservationIdBuilder_ == null) {
reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
getReservationId(),
getParentForChildren(),
isClean());
reservationId_ = null;
}
return reservationIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeleteReservationHomeSubClusterRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeleteReservationHomeSubClusterRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeleteReservationHomeSubClusterRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeleteReservationHomeSubClusterRequestProto>() {
@java.lang.Override
public DeleteReservationHomeSubClusterRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeleteReservationHomeSubClusterRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeleteReservationHomeSubClusterRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
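/*
 * Editorial usage sketch (not part of the protoc output): building,
 * serializing and re-parsing a DeleteReservationHomeSubClusterRequestProto.
 * The ReservationIdProto setters (setClusterTimestamp/setId) are assumed
 * from yarn_protos.proto, and the literal values are placeholders.
 *
 *   org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId =
 *       org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder()
 *           .setClusterTimestamp(1700000000000L)
 *           .setId(42L)
 *           .build();
 *   DeleteReservationHomeSubClusterRequestProto request =
 *       DeleteReservationHomeSubClusterRequestProto.newBuilder()
 *           .setReservationId(reservationId)
 *           .build();
 *   byte[] wire = request.toByteArray();
 *   DeleteReservationHomeSubClusterRequestProto parsed =
 *       DeleteReservationHomeSubClusterRequestProto.parseFrom(wire);
 *   // parsed.hasReservationId() is true after the round trip.
 */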
public interface DeleteReservationHomeSubClusterResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeleteReservationHomeSubClusterResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.DeleteReservationHomeSubClusterResponseProto}
*/
public static final class DeleteReservationHomeSubClusterResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeleteReservationHomeSubClusterResponseProto)
DeleteReservationHomeSubClusterResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteReservationHomeSubClusterResponseProto.newBuilder() to construct.
private DeleteReservationHomeSubClusterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteReservationHomeSubClusterResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeleteReservationHomeSubClusterResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeleteReservationHomeSubClusterResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeleteReservationHomeSubClusterResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteReservationHomeSubClusterResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeleteReservationHomeSubClusterResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeleteReservationHomeSubClusterResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeleteReservationHomeSubClusterResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeleteReservationHomeSubClusterResponseProto>() {
@java.lang.Override
public DeleteReservationHomeSubClusterResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeleteReservationHomeSubClusterResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeleteReservationHomeSubClusterResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteReservationHomeSubClusterResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
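/*
 * Editorial usage sketch (not part of the protoc output):
 * DeleteReservationHomeSubClusterResponseProto carries no fields, so the
 * default instance serializes to an empty payload and length-delimited
 * framing is the usual way to put it on a stream. writeDelimitedTo is the
 * standard protobuf MessageLite method (throws java.io.IOException); the
 * streams below are placeholders.
 *
 *   DeleteReservationHomeSubClusterResponseProto response =
 *       DeleteReservationHomeSubClusterResponseProto.getDefaultInstance();
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   response.writeDelimitedTo(out);
 *   DeleteReservationHomeSubClusterResponseProto parsed =
 *       DeleteReservationHomeSubClusterResponseProto.parseDelimitedFrom(
 *           new java.io.ByteArrayInputStream(out.toByteArray()));
 */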
public interface DeleteSubClusterPoliciesConfigurationsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string queues = 1;
* @return A list containing the queues.
*/
java.util.List<java.lang.String>
getQueuesList();
/**
* repeated string queues = 1;
* @return The count of queues.
*/
int getQueuesCount();
/**
* repeated string queues = 1;
* @param index The index of the element to return.
* @return The queues at the given index.
*/
java.lang.String getQueues(int index);
/**
* repeated string queues = 1;
* @param index The index of the value to return.
* @return The bytes of the queues at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getQueuesBytes(int index);
}
/**
* Protobuf type {@code hadoop.yarn.DeleteSubClusterPoliciesConfigurationsRequestProto}
*/
public static final class DeleteSubClusterPoliciesConfigurationsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsRequestProto)
DeleteSubClusterPoliciesConfigurationsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteSubClusterPoliciesConfigurationsRequestProto.newBuilder() to construct.
private DeleteSubClusterPoliciesConfigurationsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteSubClusterPoliciesConfigurationsRequestProto() {
queues_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeleteSubClusterPoliciesConfigurationsRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto.Builder.class);
}
public static final int QUEUES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList queues_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
/**
* repeated string queues = 1;
* @return A list containing the queues.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getQueuesList() {
return queues_;
}
/**
* repeated string queues = 1;
* @return The count of queues.
*/
public int getQueuesCount() {
return queues_.size();
}
/**
* repeated string queues = 1;
* @param index The index of the element to return.
* @return The queues at the given index.
*/
public java.lang.String getQueues(int index) {
return queues_.get(index);
}
/**
* repeated string queues = 1;
* @param index The index of the value to return.
* @return The bytes of the queues at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueuesBytes(int index) {
return queues_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < queues_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queues_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < queues_.size(); i++) {
dataSize += computeStringSizeNoTag(queues_.getRaw(i));
}
size += dataSize;
size += 1 * getQueuesList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto) obj;
if (!getQueuesList()
.equals(other.getQueuesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getQueuesCount() > 0) {
hash = (37 * hash) + QUEUES_FIELD_NUMBER;
hash = (53 * hash) + getQueuesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeleteSubClusterPoliciesConfigurationsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
queues_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
queues_.makeImmutable();
result.queues_ = queues_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto.getDefaultInstance()) return this;
if (!other.queues_.isEmpty()) {
if (queues_.isEmpty()) {
queues_ = other.queues_;
bitField0_ |= 0x00000001;
} else {
ensureQueuesIsMutable();
queues_.addAll(other.queues_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureQueuesIsMutable();
queues_.add(bs);
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList queues_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
private void ensureQueuesIsMutable() {
if (!queues_.isModifiable()) {
queues_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(queues_);
}
bitField0_ |= 0x00000001;
}
/**
* repeated string queues = 1;
* @return A list containing the queues.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getQueuesList() {
queues_.makeImmutable();
return queues_;
}
/**
* repeated string queues = 1;
* @return The count of queues.
*/
public int getQueuesCount() {
return queues_.size();
}
/**
* repeated string queues = 1;
* @param index The index of the element to return.
* @return The queues at the given index.
*/
public java.lang.String getQueues(int index) {
return queues_.get(index);
}
/**
* repeated string queues = 1;
* @param index The index of the value to return.
* @return The bytes of the queues at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueuesBytes(int index) {
return queues_.getByteString(index);
}
/**
* repeated string queues = 1;
* @param index The index to set the value at.
* @param value The queues to set.
* @return This builder for chaining.
*/
public Builder setQueues(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureQueuesIsMutable();
queues_.set(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string queues = 1;
* @param value The queues to add.
* @return This builder for chaining.
*/
public Builder addQueues(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureQueuesIsMutable();
queues_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string queues = 1;
* @param values The queues to add.
* @return This builder for chaining.
*/
public Builder addAllQueues(
java.lang.Iterable<java.lang.String> values) {
ensureQueuesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, queues_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string queues = 1;
* @return This builder for chaining.
*/
public Builder clearQueues() {
queues_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);;
onChanged();
return this;
}
/**
* repeated string queues = 1;
* @param value The bytes of the queues to add.
* @return This builder for chaining.
*/
public Builder addQueuesBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureQueuesIsMutable();
queues_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeleteSubClusterPoliciesConfigurationsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeleteSubClusterPoliciesConfigurationsRequestProto>() {
@java.lang.Override
public DeleteSubClusterPoliciesConfigurationsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeleteSubClusterPoliciesConfigurationsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeleteSubClusterPoliciesConfigurationsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
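// Usage sketch (not part of the generated protocol code): a minimal, hedged example of
// building, serializing, and re-parsing a DeleteSubClusterPoliciesConfigurationsRequestProto.
// The queue names and the helper method name below are hypothetical placeholders.
private static DeleteSubClusterPoliciesConfigurationsRequestProto
exampleDeleteSubClusterPoliciesRequest()
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
DeleteSubClusterPoliciesConfigurationsRequestProto request =
DeleteSubClusterPoliciesConfigurationsRequestProto.newBuilder()
.addQueues("root.queue-a") // hypothetical queue name
.addQueues("root.queue-b") // hypothetical queue name
.build();
byte[] wire = request.toByteArray(); // serialize to the protobuf wire format
return DeleteSubClusterPoliciesConfigurationsRequestProto.parseFrom(wire); // round-trip parse
}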
public interface DeleteSubClusterPoliciesConfigurationsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.DeleteSubClusterPoliciesConfigurationsResponseProto}
*/
public static final class DeleteSubClusterPoliciesConfigurationsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsResponseProto)
DeleteSubClusterPoliciesConfigurationsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteSubClusterPoliciesConfigurationsResponseProto.newBuilder() to construct.
private DeleteSubClusterPoliciesConfigurationsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteSubClusterPoliciesConfigurationsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeleteSubClusterPoliciesConfigurationsResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeleteSubClusterPoliciesConfigurationsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeleteSubClusterPoliciesConfigurationsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeleteSubClusterPoliciesConfigurationsResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeleteSubClusterPoliciesConfigurationsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeleteSubClusterPoliciesConfigurationsResponseProto>() {
@java.lang.Override
public DeleteSubClusterPoliciesConfigurationsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeleteSubClusterPoliciesConfigurationsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeleteSubClusterPoliciesConfigurationsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeleteSubClusterPoliciesConfigurationsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
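// Usage sketch (not part of the generated protocol code): the delete-policies response
// message declares no fields, so its default instance serializes to an empty byte array.
// The helper method name is a hypothetical placeholder.
private static byte[] exampleEmptyDeleteSubClusterPoliciesResponse() {
DeleteSubClusterPoliciesConfigurationsResponseProto response =
DeleteSubClusterPoliciesConfigurationsResponseProto.getDefaultInstance();
return response.toByteArray(); // zero length: no fields and no unknown fields to write
}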
public interface DeletePoliciesConfigurationsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeletePoliciesConfigurationsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.DeletePoliciesConfigurationsRequestProto}
*/
public static final class DeletePoliciesConfigurationsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeletePoliciesConfigurationsRequestProto)
DeletePoliciesConfigurationsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeletePoliciesConfigurationsRequestProto.newBuilder() to construct.
private DeletePoliciesConfigurationsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeletePoliciesConfigurationsRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeletePoliciesConfigurationsRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeletePoliciesConfigurationsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeletePoliciesConfigurationsRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeletePoliciesConfigurationsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeletePoliciesConfigurationsRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeletePoliciesConfigurationsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeletePoliciesConfigurationsRequestProto>() {
@java.lang.Override
public DeletePoliciesConfigurationsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeletePoliciesConfigurationsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeletePoliciesConfigurationsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
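// Usage sketch (not part of the generated protocol code): round-tripping the empty
// DeletePoliciesConfigurationsRequestProto with the length-delimited helpers, which is
// useful when several messages share a single stream. The helper method name is a
// hypothetical placeholder.
private static DeletePoliciesConfigurationsRequestProto exampleDelimitedRoundTrip()
throws java.io.IOException {
java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
DeletePoliciesConfigurationsRequestProto.getDefaultInstance()
.writeDelimitedTo(out); // prefixes the payload with its varint-encoded length
java.io.ByteArrayInputStream in = new java.io.ByteArrayInputStream(out.toByteArray());
return DeletePoliciesConfigurationsRequestProto.parseDelimitedFrom(in);
}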
public interface DeletePoliciesConfigurationsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DeletePoliciesConfigurationsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.DeletePoliciesConfigurationsResponseProto}
*/
public static final class DeletePoliciesConfigurationsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DeletePoliciesConfigurationsResponseProto)
DeletePoliciesConfigurationsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeletePoliciesConfigurationsResponseProto.newBuilder() to construct.
private DeletePoliciesConfigurationsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeletePoliciesConfigurationsResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DeletePoliciesConfigurationsResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DeletePoliciesConfigurationsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DeletePoliciesConfigurationsResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_DeletePoliciesConfigurationsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DeletePoliciesConfigurationsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DeletePoliciesConfigurationsResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeletePoliciesConfigurationsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeletePoliciesConfigurationsResponseProto>() {
@java.lang.Override
public DeletePoliciesConfigurationsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DeletePoliciesConfigurationsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DeletePoliciesConfigurationsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.DeletePoliciesConfigurationsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
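// Usage sketch (not part of the generated protocol code): because this response carries no
// fields, equals()/hashCode() depend only on the descriptor and unknown fields, so a freshly
// built instance compares equal to the shared default instance. The helper method name is a
// hypothetical placeholder.
private static boolean exampleDeletePoliciesResponseEquality() {
DeletePoliciesConfigurationsResponseProto built =
DeletePoliciesConfigurationsResponseProto.newBuilder().build();
return built.equals(DeletePoliciesConfigurationsResponseProto.getDefaultInstance()); // true
}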
public interface SubClusterPolicyConfigurationProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SubClusterPolicyConfigurationProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string queue = 1;
* @return Whether the queue field is set.
*/
boolean hasQueue();
/**
* optional string queue = 1;
* @return The queue.
*/
java.lang.String getQueue();
/**
* optional string queue = 1;
* @return The bytes for queue.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes();
/**
* optional string type = 2;
* @return Whether the type field is set.
*/
boolean hasType();
/**
* optional string type = 2;
* @return The type.
*/
java.lang.String getType();
/**
* optional string type = 2;
* @return The bytes for type.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getTypeBytes();
/**
* optional bytes params = 3;
* @return Whether the params field is set.
*/
boolean hasParams();
/**
* optional bytes params = 3;
* @return The params.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getParams();
}
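// Usage sketch (not part of the generated protocol code): populating the
// SubClusterPolicyConfigurationProto defined below through the standard generated Builder
// setters implied by the optional fields declared above (setQueue, setType, setParams).
// The queue name, policy type, params payload, and helper method name are hypothetical
// placeholders.
private static SubClusterPolicyConfigurationProto exampleSubClusterPolicyConfiguration() {
return SubClusterPolicyConfigurationProto.newBuilder()
.setQueue("root.queue-a") // hypothetical queue
.setType("org.example.HypotheticalPolicyManager") // hypothetical policy type name
.setParams(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
"weight=1.0")) // hypothetical serialized policy params
.build();
}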
/**
* Protobuf type {@code hadoop.yarn.SubClusterPolicyConfigurationProto}
*/
public static final class SubClusterPolicyConfigurationProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SubClusterPolicyConfigurationProto)
SubClusterPolicyConfigurationProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SubClusterPolicyConfigurationProto.newBuilder() to construct.
private SubClusterPolicyConfigurationProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SubClusterPolicyConfigurationProto() {
queue_ = "";
type_ = "";
params_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SubClusterPolicyConfigurationProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterPolicyConfigurationProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterPolicyConfigurationProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder.class);
}
private int bitField0_;
public static final int QUEUE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object queue_ = "";
/**
* optional string queue = 1;
* @return Whether the queue field is set.
*/
@java.lang.Override
public boolean hasQueue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string queue = 1;
* @return The queue.
*/
@java.lang.Override
public java.lang.String getQueue() {
java.lang.Object ref = queue_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
queue_ = s;
}
return s;
}
}
/**
* optional string queue = 1;
* @return The bytes for queue.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes() {
java.lang.Object ref = queue_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
queue_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int TYPE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object type_ = "";
/**
* optional string type = 2;
* @return Whether the type field is set.
*/
@java.lang.Override
public boolean hasType() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string type = 2;
* @return The type.
*/
@java.lang.Override
public java.lang.String getType() {
java.lang.Object ref = type_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
type_ = s;
}
return s;
}
}
/**
* optional string type = 2;
* @return The bytes for type.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getTypeBytes() {
java.lang.Object ref = type_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
type_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int PARAMS_FIELD_NUMBER = 3;
private org.apache.hadoop.thirdparty.protobuf.ByteString params_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes params = 3;
* @return Whether the params field is set.
*/
@java.lang.Override
public boolean hasParams() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bytes params = 3;
* @return The params.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getParams() {
return params_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queue_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, type_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeBytes(3, params_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queue_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, type_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(3, params_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto) obj;
if (hasQueue() != other.hasQueue()) return false;
if (hasQueue()) {
if (!getQueue()
.equals(other.getQueue())) return false;
}
if (hasType() != other.hasType()) return false;
if (hasType()) {
if (!getType()
.equals(other.getType())) return false;
}
if (hasParams() != other.hasParams()) return false;
if (hasParams()) {
if (!getParams()
.equals(other.getParams())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasQueue()) {
hash = (37 * hash) + QUEUE_FIELD_NUMBER;
hash = (53 * hash) + getQueue().hashCode();
}
if (hasType()) {
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + getType().hashCode();
}
if (hasParams()) {
hash = (37 * hash) + PARAMS_FIELD_NUMBER;
hash = (53 * hash) + getParams().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SubClusterPolicyConfigurationProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SubClusterPolicyConfigurationProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterPolicyConfigurationProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterPolicyConfigurationProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
queue_ = "";
type_ = "";
params_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SubClusterPolicyConfigurationProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.queue_ = queue_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.type_ = type_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.params_ = params_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance()) return this;
if (other.hasQueue()) {
queue_ = other.queue_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasType()) {
type_ = other.type_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasParams()) {
setParams(other.getParams());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
queue_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
type_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
params_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object queue_ = "";
/**
* optional string queue = 1;
* @return Whether the queue field is set.
*/
public boolean hasQueue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string queue = 1;
* @return The queue.
*/
public java.lang.String getQueue() {
java.lang.Object ref = queue_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
queue_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string queue = 1;
* @return The bytes for queue.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes() {
java.lang.Object ref = queue_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
queue_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string queue = 1;
* @param value The queue to set.
* @return This builder for chaining.
*/
public Builder setQueue(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
queue_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string queue = 1;
* @return This builder for chaining.
*/
public Builder clearQueue() {
queue_ = getDefaultInstance().getQueue();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string queue = 1;
* @param value The bytes for queue to set.
* @return This builder for chaining.
*/
public Builder setQueueBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
queue_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object type_ = "";
/**
* optional string type = 2;
* @return Whether the type field is set.
*/
public boolean hasType() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string type = 2;
* @return The type.
*/
public java.lang.String getType() {
java.lang.Object ref = type_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
type_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string type = 2;
* @return The bytes for type.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getTypeBytes() {
java.lang.Object ref = type_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
type_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string type = 2;
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
type_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string type = 2;
* @return This builder for chaining.
*/
public Builder clearType() {
type_ = getDefaultInstance().getType();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string type = 2;
* @param value The bytes for type to set.
* @return This builder for chaining.
*/
public Builder setTypeBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
type_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString params_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes params = 3;
* @return Whether the params field is set.
*/
@java.lang.Override
public boolean hasParams() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bytes params = 3;
* @return The params.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getParams() {
return params_;
}
/**
* optional bytes params = 3;
* @param value The params to set.
* @return This builder for chaining.
*/
public Builder setParams(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
params_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional bytes params = 3;
* @return This builder for chaining.
*/
public Builder clearParams() {
bitField0_ = (bitField0_ & ~0x00000004);
params_ = getDefaultInstance().getParams();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SubClusterPolicyConfigurationProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SubClusterPolicyConfigurationProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterPolicyConfigurationProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SubClusterPolicyConfigurationProto>() {
@java.lang.Override
public SubClusterPolicyConfigurationProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterPolicyConfigurationProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SubClusterPolicyConfigurationProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
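/*
 * Editor's note (not generated code): a minimal usage sketch of the class above, assuming
 * the standard protobuf-java Message API (toByteArray() is inherited from MessageLite).
 * It builds a policy configuration, serializes it, and parses it back with the
 * parseFrom(byte[]) overload shown above; the queue name and policy type strings are
 * hypothetical.
 *
 *   YarnServerFederationProtos.SubClusterPolicyConfigurationProto config =
 *       YarnServerFederationProtos.SubClusterPolicyConfigurationProto.newBuilder()
 *           .setQueue("root.default")                    // hypothetical queue name
 *           .setType("org.example.SamplePolicyManager")  // hypothetical policy type
 *           .setParams(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("{}"))
 *           .build();
 *   byte[] serialized = config.toByteArray();
 *   YarnServerFederationProtos.SubClusterPolicyConfigurationProto parsed =
 *       YarnServerFederationProtos.SubClusterPolicyConfigurationProto.parseFrom(serialized);
 *   assert parsed.equals(config);  // equals() compares the three fields and unknown fields
 */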
public interface GetSubClusterPolicyConfigurationRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetSubClusterPolicyConfigurationRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string queue = 1;
* @return Whether the queue field is set.
*/
boolean hasQueue();
/**
* optional string queue = 1;
* @return The queue.
*/
java.lang.String getQueue();
/**
* optional string queue = 1;
* @return The bytes for queue.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes();
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterPolicyConfigurationRequestProto}
*/
public static final class GetSubClusterPolicyConfigurationRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetSubClusterPolicyConfigurationRequestProto)
GetSubClusterPolicyConfigurationRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSubClusterPolicyConfigurationRequestProto.newBuilder() to construct.
private GetSubClusterPolicyConfigurationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSubClusterPolicyConfigurationRequestProto() {
queue_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetSubClusterPolicyConfigurationRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.Builder.class);
}
private int bitField0_;
public static final int QUEUE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object queue_ = "";
/**
* optional string queue = 1;
* @return Whether the queue field is set.
*/
@java.lang.Override
public boolean hasQueue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string queue = 1;
* @return The queue.
*/
@java.lang.Override
public java.lang.String getQueue() {
java.lang.Object ref = queue_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
queue_ = s;
}
return s;
}
}
/**
* optional string queue = 1;
* @return The bytes for queue.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes() {
java.lang.Object ref = queue_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
queue_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queue_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queue_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto) obj;
if (hasQueue() != other.hasQueue()) return false;
if (hasQueue()) {
if (!getQueue()
.equals(other.getQueue())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasQueue()) {
hash = (37 * hash) + QUEUE_FIELD_NUMBER;
hash = (53 * hash) + getQueue().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterPolicyConfigurationRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetSubClusterPolicyConfigurationRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
queue_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.queue_ = queue_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.getDefaultInstance()) return this;
if (other.hasQueue()) {
queue_ = other.queue_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
queue_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object queue_ = "";
/**
* optional string queue = 1;
* @return Whether the queue field is set.
*/
public boolean hasQueue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string queue = 1;
* @return The queue.
*/
public java.lang.String getQueue() {
java.lang.Object ref = queue_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
queue_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string queue = 1;
* @return The bytes for queue.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getQueueBytes() {
java.lang.Object ref = queue_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
queue_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string queue = 1;
* @param value The queue to set.
* @return This builder for chaining.
*/
public Builder setQueue(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
queue_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string queue = 1;
* @return This builder for chaining.
*/
public Builder clearQueue() {
queue_ = getDefaultInstance().getQueue();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string queue = 1;
* @param value The bytes for queue to set.
* @return This builder for chaining.
*/
public Builder setQueueBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
queue_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetSubClusterPolicyConfigurationRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetSubClusterPolicyConfigurationRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterPolicyConfigurationRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetSubClusterPolicyConfigurationRequestProto>() {
@java.lang.Override
public GetSubClusterPolicyConfigurationRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterPolicyConfigurationRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterPolicyConfigurationRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
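/*
 * Editor's note (not generated code): a minimal sketch of using the request message above,
 * assuming the standard MessageLite stream helpers (writeDelimitedTo is the counterpart of
 * the parseDelimitedFrom overload shown above, and both throw java.io.IOException). The
 * stream objects and queue name are hypothetical placeholders.
 *
 *   YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto request =
 *       YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.newBuilder()
 *           .setQueue("root.default")  // hypothetical queue name
 *           .build();
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   request.writeDelimitedTo(out);  // length-prefixed framing
 *   java.io.ByteArrayInputStream in = new java.io.ByteArrayInputStream(out.toByteArray());
 *   YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto roundTripped =
 *       YarnServerFederationProtos.GetSubClusterPolicyConfigurationRequestProto.parseDelimitedFrom(in);
 */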
public interface GetSubClusterPolicyConfigurationResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetSubClusterPolicyConfigurationResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return Whether the policyConfiguration field is set.
*/
boolean hasPolicyConfiguration();
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return The policyConfiguration.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getPolicyConfiguration();
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder getPolicyConfigurationOrBuilder();
}
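/*
 * Editor's note (not generated code): as implemented below, getPolicyConfiguration() never
 * returns null; when the field is unset it returns
 * SubClusterPolicyConfigurationProto.getDefaultInstance(), so callers should check
 * hasPolicyConfiguration() before trusting the payload. A minimal sketch ("response" is a
 * hypothetical GetSubClusterPolicyConfigurationResponseProto instance):
 *
 *   if (response.hasPolicyConfiguration()) {
 *     YarnServerFederationProtos.SubClusterPolicyConfigurationProto policy =
 *         response.getPolicyConfiguration();
 *     // use policy.getQueue(), policy.getType(), policy.getParams()
 *   }
 */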
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterPolicyConfigurationResponseProto}
*/
public static final class GetSubClusterPolicyConfigurationResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetSubClusterPolicyConfigurationResponseProto)
GetSubClusterPolicyConfigurationResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSubClusterPolicyConfigurationResponseProto.newBuilder() to construct.
private GetSubClusterPolicyConfigurationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSubClusterPolicyConfigurationResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetSubClusterPolicyConfigurationResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto.Builder.class);
}
private int bitField0_;
public static final int POLICY_CONFIGURATION_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto policyConfiguration_;
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return Whether the policyConfiguration field is set.
*/
@java.lang.Override
public boolean hasPolicyConfiguration() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return The policyConfiguration.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getPolicyConfiguration() {
return policyConfiguration_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder getPolicyConfigurationOrBuilder() {
return policyConfiguration_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getPolicyConfiguration());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getPolicyConfiguration());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto) obj;
if (hasPolicyConfiguration() != other.hasPolicyConfiguration()) return false;
if (hasPolicyConfiguration()) {
if (!getPolicyConfiguration()
.equals(other.getPolicyConfiguration())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPolicyConfiguration()) {
hash = (37 * hash) + POLICY_CONFIGURATION_FIELD_NUMBER;
hash = (53 * hash) + getPolicyConfiguration().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterPolicyConfigurationResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetSubClusterPolicyConfigurationResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getPolicyConfigurationFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
policyConfiguration_ = null;
if (policyConfigurationBuilder_ != null) {
policyConfigurationBuilder_.dispose();
policyConfigurationBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPolicyConfigurationResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.policyConfiguration_ = policyConfigurationBuilder_ == null
? policyConfiguration_
: policyConfigurationBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto.getDefaultInstance()) return this;
if (other.hasPolicyConfiguration()) {
mergePolicyConfiguration(other.getPolicyConfiguration());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getPolicyConfigurationFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto policyConfiguration_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder> policyConfigurationBuilder_;
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return Whether the policyConfiguration field is set.
*/
public boolean hasPolicyConfiguration() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return The policyConfiguration.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getPolicyConfiguration() {
if (policyConfigurationBuilder_ == null) {
return policyConfiguration_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
} else {
return policyConfigurationBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder setPolicyConfiguration(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto value) {
if (policyConfigurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
policyConfiguration_ = value;
} else {
policyConfigurationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder setPolicyConfiguration(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder builderForValue) {
if (policyConfigurationBuilder_ == null) {
policyConfiguration_ = builderForValue.build();
} else {
policyConfigurationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder mergePolicyConfiguration(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto value) {
if (policyConfigurationBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
policyConfiguration_ != null &&
policyConfiguration_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance()) {
getPolicyConfigurationBuilder().mergeFrom(value);
} else {
policyConfiguration_ = value;
}
} else {
policyConfigurationBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder clearPolicyConfiguration() {
bitField0_ = (bitField0_ & ~0x00000001);
policyConfiguration_ = null;
if (policyConfigurationBuilder_ != null) {
policyConfigurationBuilder_.dispose();
policyConfigurationBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder getPolicyConfigurationBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getPolicyConfigurationFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder getPolicyConfigurationOrBuilder() {
if (policyConfigurationBuilder_ != null) {
return policyConfigurationBuilder_.getMessageOrBuilder();
} else {
return policyConfiguration_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
}
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder>
getPolicyConfigurationFieldBuilder() {
if (policyConfigurationBuilder_ == null) {
policyConfigurationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder>(
getPolicyConfiguration(),
getParentForChildren(),
isClean());
policyConfiguration_ = null;
}
return policyConfigurationBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetSubClusterPolicyConfigurationResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetSubClusterPolicyConfigurationResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterPolicyConfigurationResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetSubClusterPolicyConfigurationResponseProto>() {
@java.lang.Override
public GetSubClusterPolicyConfigurationResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterPolicyConfigurationResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetSubClusterPolicyConfigurationResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPolicyConfigurationResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
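// Usage sketch (illustrative, not emitted by protoc): the response wraps a single optional
// SubClusterPolicyConfigurationProto, so callers should check hasPolicyConfiguration() before
// reading it. The local names `response`, `bytes` and `parsed` below are hypothetical.
//
//   GetSubClusterPolicyConfigurationResponseProto response =
//       GetSubClusterPolicyConfigurationResponseProto.newBuilder()
//           .setPolicyConfiguration(SubClusterPolicyConfigurationProto.getDefaultInstance())
//           .build();
//   byte[] bytes = response.toByteArray();
//   GetSubClusterPolicyConfigurationResponseProto parsed =
//       GetSubClusterPolicyConfigurationResponseProto.parseFrom(bytes);
//   if (parsed.hasPolicyConfiguration()) {
//     SubClusterPolicyConfigurationProto cfg = parsed.getPolicyConfiguration();
//   }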
public interface SetSubClusterPolicyConfigurationRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SetSubClusterPolicyConfigurationRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return Whether the policyConfiguration field is set.
*/
boolean hasPolicyConfiguration();
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return The policyConfiguration.
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getPolicyConfiguration();
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder getPolicyConfigurationOrBuilder();
}
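// Note (not generated code): the *OrBuilder interface above is implemented by both the immutable
// SetSubClusterPolicyConfigurationRequestProto and its Builder, so read-only callers can accept
// either form through the interface without forcing an intermediate build().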
/**
* Protobuf type {@code hadoop.yarn.SetSubClusterPolicyConfigurationRequestProto}
*/
public static final class SetSubClusterPolicyConfigurationRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SetSubClusterPolicyConfigurationRequestProto)
SetSubClusterPolicyConfigurationRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SetSubClusterPolicyConfigurationRequestProto.newBuilder() to construct.
private SetSubClusterPolicyConfigurationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SetSubClusterPolicyConfigurationRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SetSubClusterPolicyConfigurationRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto.Builder.class);
}
private int bitField0_;
public static final int POLICY_CONFIGURATION_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto policyConfiguration_;
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return Whether the policyConfiguration field is set.
*/
@java.lang.Override
public boolean hasPolicyConfiguration() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return The policyConfiguration.
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getPolicyConfiguration() {
return policyConfiguration_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder getPolicyConfigurationOrBuilder() {
return policyConfiguration_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getPolicyConfiguration());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getPolicyConfiguration());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto) obj;
if (hasPolicyConfiguration() != other.hasPolicyConfiguration()) return false;
if (hasPolicyConfiguration()) {
if (!getPolicyConfiguration()
.equals(other.getPolicyConfiguration())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPolicyConfiguration()) {
hash = (37 * hash) + POLICY_CONFIGURATION_FIELD_NUMBER;
hash = (53 * hash) + getPolicyConfiguration().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SetSubClusterPolicyConfigurationRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SetSubClusterPolicyConfigurationRequestProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getPolicyConfigurationFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
policyConfiguration_ = null;
if (policyConfigurationBuilder_ != null) {
policyConfigurationBuilder_.dispose();
policyConfigurationBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.policyConfiguration_ = policyConfigurationBuilder_ == null
? policyConfiguration_
: policyConfigurationBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto.getDefaultInstance()) return this;
if (other.hasPolicyConfiguration()) {
mergePolicyConfiguration(other.getPolicyConfiguration());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getPolicyConfigurationFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto policyConfiguration_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder> policyConfigurationBuilder_;
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return Whether the policyConfiguration field is set.
*/
public boolean hasPolicyConfiguration() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
* @return The policyConfiguration.
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto getPolicyConfiguration() {
if (policyConfigurationBuilder_ == null) {
return policyConfiguration_ == null ? org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
} else {
return policyConfigurationBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder setPolicyConfiguration(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto value) {
if (policyConfigurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
policyConfiguration_ = value;
} else {
policyConfigurationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder setPolicyConfiguration(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder builderForValue) {
if (policyConfigurationBuilder_ == null) {
policyConfiguration_ = builderForValue.build();
} else {
policyConfigurationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder mergePolicyConfiguration(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto value) {
if (policyConfigurationBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
policyConfiguration_ != null &&
policyConfiguration_ != org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance()) {
getPolicyConfigurationBuilder().mergeFrom(value);
} else {
policyConfiguration_ = value;
}
} else {
policyConfigurationBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public Builder clearPolicyConfiguration() {
bitField0_ = (bitField0_ & ~0x00000001);
policyConfiguration_ = null;
if (policyConfigurationBuilder_ != null) {
policyConfigurationBuilder_.dispose();
policyConfigurationBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder getPolicyConfigurationBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getPolicyConfigurationFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder getPolicyConfigurationOrBuilder() {
if (policyConfigurationBuilder_ != null) {
return policyConfigurationBuilder_.getMessageOrBuilder();
} else {
return policyConfiguration_ == null ?
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.getDefaultInstance() : policyConfiguration_;
}
}
/**
* optional .hadoop.yarn.SubClusterPolicyConfigurationProto policy_configuration = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder>
getPolicyConfigurationFieldBuilder() {
if (policyConfigurationBuilder_ == null) {
policyConfigurationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProto.Builder, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterPolicyConfigurationProtoOrBuilder>(
getPolicyConfiguration(),
getParentForChildren(),
isClean());
policyConfiguration_ = null;
}
return policyConfigurationBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SetSubClusterPolicyConfigurationRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SetSubClusterPolicyConfigurationRequestProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SetSubClusterPolicyConfigurationRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SetSubClusterPolicyConfigurationRequestProto>() {
@java.lang.Override
public SetSubClusterPolicyConfigurationRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SetSubClusterPolicyConfigurationRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SetSubClusterPolicyConfigurationRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
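// Usage sketch (illustrative, not emitted by protoc): building a request and framing it with the
// length-delimited helpers, which is useful when several messages share one stream. The names
// `request`, `out` and `roundTripped` below are hypothetical.
//
//   SetSubClusterPolicyConfigurationRequestProto request =
//       SetSubClusterPolicyConfigurationRequestProto.newBuilder()
//           .setPolicyConfiguration(SubClusterPolicyConfigurationProto.getDefaultInstance())
//           .build();
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   request.writeDelimitedTo(out);
//   SetSubClusterPolicyConfigurationRequestProto roundTripped =
//       SetSubClusterPolicyConfigurationRequestProto.parseDelimitedFrom(
//           new java.io.ByteArrayInputStream(out.toByteArray()));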
public interface SetSubClusterPolicyConfigurationResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SetSubClusterPolicyConfigurationResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.SetSubClusterPolicyConfigurationResponseProto}
*/
public static final class SetSubClusterPolicyConfigurationResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SetSubClusterPolicyConfigurationResponseProto)
SetSubClusterPolicyConfigurationResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SetSubClusterPolicyConfigurationResponseProto.newBuilder() to construct.
private SetSubClusterPolicyConfigurationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SetSubClusterPolicyConfigurationResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SetSubClusterPolicyConfigurationResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SetSubClusterPolicyConfigurationResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SetSubClusterPolicyConfigurationResponseProto)
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_SetSubClusterPolicyConfigurationResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto build() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto buildPartial() {
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto result = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto other) {
if (other == org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SetSubClusterPolicyConfigurationResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SetSubClusterPolicyConfigurationResponseProto)
private static final org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto();
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SetSubClusterPolicyConfigurationResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SetSubClusterPolicyConfigurationResponseProto>() {
@java.lang.Override
public SetSubClusterPolicyConfigurationResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SetSubClusterPolicyConfigurationResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SetSubClusterPolicyConfigurationResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SetSubClusterPolicyConfigurationResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
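// Usage sketch (illustrative, not emitted by protoc): SetSubClusterPolicyConfigurationResponseProto
// declares no fields, so the shared default instance is normally all a caller needs; equals() and
// hashCode() still account for any unknown fields preserved from the wire. The names below are
// hypothetical.
//
//   SetSubClusterPolicyConfigurationResponseProto ack =
//       SetSubClusterPolicyConfigurationResponseProto.getDefaultInstance();
//   byte[] wire = ack.toByteArray();   // empty payload for a field-less message
//   SetSubClusterPolicyConfigurationResponseProto parsed =
//       SetSubClusterPolicyConfigurationResponseProto.parseFrom(wire);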
public interface GetSubClusterPoliciesConfigurationsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetSubClusterPoliciesConfigurationsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterPoliciesConfigurationsRequestProto}
*/
public static final class GetSubClusterPoliciesConfigurationsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetSubClusterPoliciesConfigurationsRequestProto)
GetSubClusterPoliciesConfigurationsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSubClusterPoliciesConfigurationsRequestProto.newBuilder() to construct.
private GetSubClusterPoliciesConfigurationsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSubClusterPoliciesConfigurationsRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetSubClusterPoliciesConfigurationsRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPoliciesConfigurationsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.internal_static_hadoop_yarn_GetSubClusterPoliciesConfigurationsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto.class, org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto other = (org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.GetSubClusterPoliciesConfigurationsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
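// Editor's note: a minimal sketch (not part of the generated code) of how the builder
// entry points above are typically used, assuming the standard generated protobuf API
// (newBuilder(), build(), writeDelimitedTo(), parseDelimitedFrom()); the stream variable
// names are illustrative only.
//
//   GetSubClusterPoliciesConfigurationsRequestProto request =
//       GetSubClusterPoliciesConfigurationsRequestProto.newBuilder().build();
//   // Length-delimited framing lets several messages share one stream.
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   request.writeDelimitedTo(out);
//   GetSubClusterPoliciesConfigurationsRequestProto parsed =
//       GetSubClusterPoliciesConfigurationsRequestProto.parseDelimitedFrom(
//           new java.io.ByteArrayInputStream(out.toByteArray()));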
/**
* Protobuf type {@code hadoop.yarn.GetSubClusterPoliciesConfigurationsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements