// org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos
// NOTE(review): removed website scraper/paywall boilerplate that preceded the
// source; it was not part of the generated file and made it uncompilable.
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: yarn_server_common_service_protos.proto
package org.apache.hadoop.yarn.proto;
public final class YarnServerCommonServiceProtos {
// Static-only holder for generated message classes; private constructor
// prevents instantiation.
private YarnServerCommonServiceProtos() {}
// Registers all proto extensions defined in this file with a lite registry.
// This file defines no extensions, so the body is intentionally empty; the
// method exists because the protobuf code generator always emits it.
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
// Full-registry overload: delegates to the lite-registry variant
// (ExtensionRegistry extends ExtensionRegistryLite, so the cast is safe).
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
// Accessor contract generated for hadoop.yarn.RemoteNodeProto; implemented by
// both the immutable message and its Builder. Each optional field gets a
// has/get pair, plus a get...Bytes accessor for string fields.
// (Javadoc <code> wrappers restored; they were stripped as HTML when this
// file was copied from a web page.)
public interface RemoteNodeProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RemoteNodeProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
* @return Whether the nodeId field is set.
*/
boolean hasNodeId();
/**
* <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
* @return The nodeId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
/**
* <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();
/**
* <code>optional string http_address = 2;</code>
* @return Whether the httpAddress field is set.
*/
boolean hasHttpAddress();
/**
* <code>optional string http_address = 2;</code>
* @return The httpAddress.
*/
java.lang.String getHttpAddress();
/**
* <code>optional string http_address = 2;</code>
* @return The bytes for httpAddress.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getHttpAddressBytes();
/**
* <code>optional string rack_name = 3;</code>
* @return Whether the rackName field is set.
*/
boolean hasRackName();
/**
* <code>optional string rack_name = 3;</code>
* @return The rackName.
*/
java.lang.String getRackName();
/**
* <code>optional string rack_name = 3;</code>
* @return The bytes for rackName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getRackNameBytes();
/**
* <code>optional string node_partition = 4;</code>
* @return Whether the nodePartition field is set.
*/
boolean hasNodePartition();
/**
* <code>optional string node_partition = 4;</code>
* @return The nodePartition.
*/
java.lang.String getNodePartition();
/**
* <code>optional string node_partition = 4;</code>
* @return The bytes for nodePartition.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getNodePartitionBytes();
}
/**
* Protobuf type {@code hadoop.yarn.RemoteNodeProto}
*/
public static final class RemoteNodeProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RemoteNodeProto)
RemoteNodeProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RemoteNodeProto.newBuilder() to construct.
// FIX(review): the wildcard type argument on the Builder parameter was lost
// (stripped as an HTML tag when this file was copied from a web page), which
// left `Builder> builder` — a syntax error. Restored to `Builder<?>`, the
// form protoc generates for GeneratedMessageV3 subclasses.
private RemoteNodeProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: string fields start at the proto2 default "";
// nodeId_ stays null and getNodeId() substitutes the default instance lazily.
private RemoteNodeProto() {
httpAddress_ = "";
rackName_ = "";
nodePartition_ = "";
}
// Reflective instantiation hook used by the protobuf runtime; always returns
// a fresh, empty message. The parameter is a marker type and is unused.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RemoteNodeProto();
}
// Returns the shared, file-level descriptor for hadoop.yarn.RemoteNodeProto.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RemoteNodeProto_descriptor;
}
// Supplies the reflection table binding descriptor fields to the generated
// message and builder classes; initialized on first use.
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RemoteNodeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder.class);
}
// Presence bits for the optional fields: bit 0 = nodeId, bit 1 = httpAddress,
// bit 2 = rackName, bit 3 = nodePartition.
private int bitField0_;
public static final int NODE_ID_FIELD_NUMBER = 1;
// null until set; accessors substitute NodeIdProto.getDefaultInstance().
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
/**
* <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
* @return Whether the nodeId field is set.
*/
@java.lang.Override
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
* @return The nodeId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
/**
* <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
public static final int HTTP_ADDRESS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object httpAddress_ = "";
/**
* optional string http_address = 2;
* @return Whether the httpAddress field is set.
*/
@java.lang.Override
public boolean hasHttpAddress() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string http_address = 2;
* @return The httpAddress.
*/
@java.lang.Override
public java.lang.String getHttpAddress() {
java.lang.Object ref = httpAddress_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
httpAddress_ = s;
}
return s;
}
}
/**
* optional string http_address = 2;
* @return The bytes for httpAddress.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getHttpAddressBytes() {
java.lang.Object ref = httpAddress_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
httpAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int RACK_NAME_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object rackName_ = "";
/**
* optional string rack_name = 3;
* @return Whether the rackName field is set.
*/
@java.lang.Override
public boolean hasRackName() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string rack_name = 3;
* @return The rackName.
*/
@java.lang.Override
public java.lang.String getRackName() {
java.lang.Object ref = rackName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rackName_ = s;
}
return s;
}
}
/**
* optional string rack_name = 3;
* @return The bytes for rackName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRackNameBytes() {
java.lang.Object ref = rackName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rackName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int NODE_PARTITION_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object nodePartition_ = "";
/**
* optional string node_partition = 4;
* @return Whether the nodePartition field is set.
*/
@java.lang.Override
public boolean hasNodePartition() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string node_partition = 4;
* @return The nodePartition.
*/
@java.lang.Override
public java.lang.String getNodePartition() {
java.lang.Object ref = nodePartition_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
nodePartition_ = s;
}
return s;
}
}
/**
* optional string node_partition = 4;
* @return The bytes for nodePartition.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodePartitionBytes() {
java.lang.Object ref = nodePartition_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nodePartition_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  // memoizedIsInitialized is a tri-state cache: -1 = not yet computed,
  // 0 = known uninitialized, 1 = known initialized.
  final byte cached = memoizedIsInitialized;
  if (cached == 1) {
    return true;
  }
  if (cached == 0) {
    return false;
  }
  // This message has no required fields, so it is always initialized;
  // record that in the cache before returning.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes only the fields whose presence bit is set, in field-number
// order, then appends any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getNodeId());
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, httpAddress_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, rackName_);
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, nodePartition_);
}
getUnknownFields().writeTo(output);
}
// Computes the wire size of the message, counting only fields whose presence
// bit is set plus any unknown fields. The result is memoized in memoizedSize
// (-1 means "not yet computed"); safe because the message is immutable.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getNodeId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, httpAddress_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, rackName_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, nodePartition_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-wise equality: two messages are equal when each field agrees on both
// presence and (if present) value, and their unknown-field sets match.
// Non-RemoteNodeProto arguments fall back to the superclass comparison.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto) obj;
if (hasNodeId() != other.hasNodeId()) return false;
if (hasNodeId()) {
if (!getNodeId()
.equals(other.getNodeId())) return false;
}
if (hasHttpAddress() != other.hasHttpAddress()) return false;
if (hasHttpAddress()) {
if (!getHttpAddress()
.equals(other.getHttpAddress())) return false;
}
if (hasRackName() != other.hasRackName()) return false;
if (hasRackName()) {
if (!getRackName()
.equals(other.getRackName())) return false;
}
if (hasNodePartition() != other.hasNodePartition()) return false;
if (hasNodePartition()) {
if (!getNodePartition()
.equals(other.getNodePartition())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash consistent with equals(): mixes the descriptor, each present field
// (tagged by its field number so equal values in different fields hash
// differently), and the unknown fields. Memoized in memoizedHashCode with
// 0 as the "not yet computed" sentinel.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNodeId()) {
hash = (37 * hash) + NODE_ID_FIELD_NUMBER;
hash = (53 * hash) + getNodeId().hashCode();
}
if (hasHttpAddress()) {
hash = (37 * hash) + HTTP_ADDRESS_FIELD_NUMBER;
hash = (53 * hash) + getHttpAddress().hashCode();
}
if (hasRackName()) {
hash = (37 * hash) + RACK_NAME_FIELD_NUMBER;
hash = (53 * hash) + getRackName().hashCode();
}
if (hasNodePartition()) {
hash = (37 * hash) + NODE_PARTITION_FIELD_NUMBER;
hash = (53 * hash) + getNodePartition().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RemoteNodeProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RemoteNodeProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RemoteNodeProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RemoteNodeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getNodeIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
httpAddress_ = "";
rackName_ = "";
nodePartition_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RemoteNodeProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.nodeId_ = nodeIdBuilder_ == null
? nodeId_
: nodeIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.httpAddress_ = httpAddress_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.rackName_ = rackName_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.nodePartition_ = nodePartition_;
to_bitField0_ |= 0x00000008;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.getDefaultInstance()) return this;
if (other.hasNodeId()) {
mergeNodeId(other.getNodeId());
}
if (other.hasHttpAddress()) {
httpAddress_ = other.httpAddress_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasRackName()) {
rackName_ = other.rackName_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasNodePartition()) {
nodePartition_ = other.nodePartition_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getNodeIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
httpAddress_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
rackName_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
nodePartition_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 34
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return Whether the nodeId field is set.
*/
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return The nodeId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
if (nodeIdBuilder_ == null) {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
} else {
return nodeIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeId_ = value;
} else {
nodeIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder setNodeId(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (nodeIdBuilder_ == null) {
nodeId_ = builderForValue.build();
} else {
nodeIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
nodeId_ != null &&
nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
getNodeIdBuilder().mergeFrom(value);
} else {
nodeId_ = value;
}
} else {
nodeIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder clearNodeId() {
bitField0_ = (bitField0_ & ~0x00000001);
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getNodeIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
if (nodeIdBuilder_ != null) {
return nodeIdBuilder_.getMessageOrBuilder();
} else {
return nodeId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getNodeIdFieldBuilder() {
if (nodeIdBuilder_ == null) {
nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
getNodeId(),
getParentForChildren(),
isClean());
nodeId_ = null;
}
return nodeIdBuilder_;
}
private java.lang.Object httpAddress_ = "";
/**
* optional string http_address = 2;
* @return Whether the httpAddress field is set.
*/
public boolean hasHttpAddress() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string http_address = 2;
* @return The httpAddress.
*/
public java.lang.String getHttpAddress() {
java.lang.Object ref = httpAddress_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
httpAddress_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string http_address = 2;
* @return The bytes for httpAddress.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getHttpAddressBytes() {
java.lang.Object ref = httpAddress_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
httpAddress_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string http_address = 2;
* @param value The httpAddress to set.
* @return This builder for chaining.
*/
public Builder setHttpAddress(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
httpAddress_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string http_address = 2;
* @return This builder for chaining.
*/
public Builder clearHttpAddress() {
httpAddress_ = getDefaultInstance().getHttpAddress();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string http_address = 2;
* @param value The bytes for httpAddress to set.
* @return This builder for chaining.
*/
public Builder setHttpAddressBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
httpAddress_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object rackName_ = "";
/**
* optional string rack_name = 3;
* @return Whether the rackName field is set.
*/
public boolean hasRackName() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string rack_name = 3;
* @return The rackName.
*/
public java.lang.String getRackName() {
java.lang.Object ref = rackName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rackName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string rack_name = 3;
* @return The bytes for rackName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRackNameBytes() {
java.lang.Object ref = rackName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rackName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string rack_name = 3;
* @param value The rackName to set.
* @return This builder for chaining.
*/
public Builder setRackName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
rackName_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string rack_name = 3;
* @return This builder for chaining.
*/
public Builder clearRackName() {
rackName_ = getDefaultInstance().getRackName();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string rack_name = 3;
* @param value The bytes for rackName to set.
* @return This builder for chaining.
*/
public Builder setRackNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
rackName_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
// Lazily-converted string field: holds either a java.lang.String or a
// ByteString; presence is tracked by bit 0x00000008 of bitField0_.
private java.lang.Object nodePartition_ = "";
/**
 * optional string node_partition = 4;
 * @return Whether the nodePartition field is set.
 */
public boolean hasNodePartition() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * optional string node_partition = 4;
 * @return The nodePartition.
 */
public java.lang.String getNodePartition() {
java.lang.Object ref = nodePartition_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decoded String when the bytes are valid UTF-8, so
// invalid input keeps round-tripping as the original bytes.
if (bs.isValidUtf8()) {
nodePartition_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * optional string node_partition = 4;
 * @return The bytes for nodePartition.
 */
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodePartitionBytes() {
java.lang.Object ref = nodePartition_;
if (ref instanceof String) {
// Encode and cache the ByteString form, mirroring getRackNameBytes().
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nodePartition_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * optional string node_partition = 4;
 * @param value The nodePartition to set.
 * @return This builder for chaining.
 */
public Builder setNodePartition(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nodePartition_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 * optional string node_partition = 4;
 * @return This builder for chaining.
 */
public Builder clearNodePartition() {
nodePartition_ = getDefaultInstance().getNodePartition();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
 * optional string node_partition = 4;
 * @param value The bytes for nodePartition to set.
 * @return This builder for chaining.
 */
public Builder setNodePartitionBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
// Bytes are stored without UTF-8 validation (proto2 semantics).
nodePartition_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass;
// these overrides exist only to narrow the return type to this Builder.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RemoteNodeProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RemoteNodeProto)
// Singleton default instance: all unset message-typed getters return this
// shared immutable object instead of null.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Legacy public parser field; prefer the parser() accessor. The
// <RemoteNodeProto> type arguments restored here had been stripped by HTML
// extraction, leaving raw types.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RemoteNodeProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RemoteNodeProto>() {
@java.lang.Override
public RemoteNodeProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
// Parse into a fresh builder; on any failure attach the partially-built
// message to the exception so callers can inspect what was read.
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O errors in the protobuf exception type expected here.
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
/**
 * @return The parser for this message type.
 */
public static org.apache.hadoop.thirdparty.protobuf.Parser<RemoteNodeProto> parser() {
return PARSER;
}
@java.lang.Override
// Restored the <RemoteNodeProto> type argument lost to HTML extraction;
// the covariant override narrows Parser<? extends Message>.
public org.apache.hadoop.thirdparty.protobuf.Parser<RemoteNodeProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface RegisterDistributedSchedulingAMResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RegisterDistributedSchedulingAMResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
* @return Whether the registerResponse field is set.
*/
boolean hasRegisterResponse();
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
* @return The registerResponse.
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getRegisterResponse();
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder getRegisterResponseOrBuilder();
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
* @return Whether the maxContainerResource field is set.
*/
boolean hasMaxContainerResource();
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
* @return The maxContainerResource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaxContainerResource();
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaxContainerResourceOrBuilder();
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
* @return Whether the minContainerResource field is set.
*/
boolean hasMinContainerResource();
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
* @return The minContainerResource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMinContainerResource();
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMinContainerResourceOrBuilder();
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
* @return Whether the incrContainerResource field is set.
*/
boolean hasIncrContainerResource();
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
* @return The incrContainerResource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getIncrContainerResource();
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getIncrContainerResourceOrBuilder();
/**
* optional int32 container_token_expiry_interval = 5;
* @return Whether the containerTokenExpiryInterval field is set.
*/
boolean hasContainerTokenExpiryInterval();
/**
* optional int32 container_token_expiry_interval = 5;
* @return The containerTokenExpiryInterval.
*/
int getContainerTokenExpiryInterval();
/**
* optional int64 container_id_start = 6;
* @return Whether the containerIdStart field is set.
*/
boolean hasContainerIdStart();
/**
* optional int64 container_id_start = 6;
* @return The containerIdStart.
*/
long getContainerIdStart();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
java.util.List
getNodesForSchedulingList();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getNodesForScheduling(int index);
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
int getNodesForSchedulingCount();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingOrBuilderList();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder getNodesForSchedulingOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.RegisterDistributedSchedulingAMResponseProto}
*/
public static final class RegisterDistributedSchedulingAMResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RegisterDistributedSchedulingAMResponseProto)
RegisterDistributedSchedulingAMResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RegisterDistributedSchedulingAMResponseProto.newBuilder() to construct.
// (Restored the wildcard type argument Builder<?> that HTML extraction had
// mangled into the syntactically invalid "Builder>".)
private RegisterDistributedSchedulingAMResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance; the repeated field
// starts as an immutable empty list.
private RegisterDistributedSchedulingAMResponseProto() {
nodesForScheduling_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RegisterDistributedSchedulingAMResponseProto();
}
// Descriptor and reflection plumbing generated from
// yarn_server_common_service_protos.proto.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterDistributedSchedulingAMResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterDistributedSchedulingAMResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto.Builder.class);
}
// Packed presence flags for the optional fields 1-6 (one bit per field).
private int bitField0_;
public static final int REGISTER_RESPONSE_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto registerResponse_;
/**
 * optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
 * @return Whether the registerResponse field is set.
 */
@java.lang.Override
public boolean hasRegisterResponse() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
 * @return The registerResponse.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getRegisterResponse() {
// Never returns null: falls back to the shared default instance when unset.
return registerResponse_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance() : registerResponse_;
}
/**
 * optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder getRegisterResponseOrBuilder() {
return registerResponse_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance() : registerResponse_;
}
public static final int MAX_CONTAINER_RESOURCE_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maxContainerResource_;
/**
 * optional .hadoop.yarn.ResourceProto max_container_resource = 2;
 * @return Whether the maxContainerResource field is set.
 */
@java.lang.Override
public boolean hasMaxContainerResource() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional .hadoop.yarn.ResourceProto max_container_resource = 2;
 * @return The maxContainerResource.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaxContainerResource() {
// Never returns null: falls back to the shared default instance when unset.
return maxContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maxContainerResource_;
}
/**
 * optional .hadoop.yarn.ResourceProto max_container_resource = 2;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaxContainerResourceOrBuilder() {
return maxContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maxContainerResource_;
}
public static final int MIN_CONTAINER_RESOURCE_FIELD_NUMBER = 3;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto minContainerResource_;
/**
 * optional .hadoop.yarn.ResourceProto min_container_resource = 3;
 * @return Whether the minContainerResource field is set.
 */
@java.lang.Override
public boolean hasMinContainerResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hadoop.yarn.ResourceProto min_container_resource = 3;
 * @return The minContainerResource.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMinContainerResource() {
// Never returns null: falls back to the shared default instance when unset.
return minContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : minContainerResource_;
}
/**
 * optional .hadoop.yarn.ResourceProto min_container_resource = 3;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMinContainerResourceOrBuilder() {
return minContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : minContainerResource_;
}
public static final int INCR_CONTAINER_RESOURCE_FIELD_NUMBER = 4;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto incrContainerResource_;
/**
 * optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
 * @return Whether the incrContainerResource field is set.
 */
@java.lang.Override
public boolean hasIncrContainerResource() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
 * @return The incrContainerResource.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getIncrContainerResource() {
// Never returns null: falls back to the shared default instance when unset.
return incrContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : incrContainerResource_;
}
/**
 * optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getIncrContainerResourceOrBuilder() {
return incrContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : incrContainerResource_;
}
public static final int CONTAINER_TOKEN_EXPIRY_INTERVAL_FIELD_NUMBER = 5;
// Scalar field; presence is tracked separately via bit 0x00000010, so a
// stored 0 is distinguishable from "unset".
private int containerTokenExpiryInterval_ = 0;
/**
 * optional int32 container_token_expiry_interval = 5;
 * @return Whether the containerTokenExpiryInterval field is set.
 */
@java.lang.Override
public boolean hasContainerTokenExpiryInterval() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
 * optional int32 container_token_expiry_interval = 5;
 * @return The containerTokenExpiryInterval.
 */
@java.lang.Override
public int getContainerTokenExpiryInterval() {
return containerTokenExpiryInterval_;
}
public static final int CONTAINER_ID_START_FIELD_NUMBER = 6;
// Scalar field; presence is tracked via bit 0x00000020 of bitField0_.
private long containerIdStart_ = 0L;
/**
 * optional int64 container_id_start = 6;
 * @return Whether the containerIdStart field is set.
 */
@java.lang.Override
public boolean hasContainerIdStart() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
 * optional int64 container_id_start = 6;
 * @return The containerIdStart.
 */
@java.lang.Override
public long getContainerIdStart() {
return containerIdStart_;
}
public static final int NODES_FOR_SCHEDULING_FIELD_NUMBER = 7;
@SuppressWarnings("serial")
private java.util.List nodesForScheduling_;
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
@java.lang.Override
public java.util.List getNodesForSchedulingList() {
return nodesForScheduling_;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingOrBuilderList() {
return nodesForScheduling_;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
@java.lang.Override
public int getNodesForSchedulingCount() {
return nodesForScheduling_.size();
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getNodesForScheduling(int index) {
return nodesForScheduling_.get(index);
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder getNodesForSchedulingOrBuilder(
int index) {
return nodesForScheduling_.get(index);
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message has no required fields of its own; it is only
// uninitialized if a set sub-message is uninitialized.
if (hasRegisterResponse()) {
if (!getRegisterResponse().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasMaxContainerResource()) {
if (!getMaxContainerResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasMinContainerResource()) {
if (!getMinContainerResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasIncrContainerResource()) {
if (!getIncrContainerResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Serialize fields in ascending field-number order, emitting each optional
// field only when its presence bit is set.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getRegisterResponse());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getMaxContainerResource());
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getMinContainerResource());
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(4, getIncrContainerResource());
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeInt32(5, containerTokenExpiryInterval_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeInt64(6, containerIdStart_);
}
for (int i = 0; i < nodesForScheduling_.size(); i++) {
output.writeMessage(7, nodesForScheduling_.get(i));
}
// Preserve any fields read from a newer schema version.
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Memoized: -1 means not yet computed (messages are immutable, so the
// cached size never goes stale).
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getRegisterResponse());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getMaxContainerResource());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(3, getMinContainerResource());
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, getIncrContainerResource());
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(5, containerTokenExpiryInterval_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(6, containerIdStart_);
}
for (int i = 0; i < nodesForScheduling_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(7, nodesForScheduling_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto) obj;
// For each optional field: presence must match, and when present the
// values must match. Field order mirrors the proto definition.
if (hasRegisterResponse() != other.hasRegisterResponse()) return false;
if (hasRegisterResponse()) {
if (!getRegisterResponse()
.equals(other.getRegisterResponse())) return false;
}
if (hasMaxContainerResource() != other.hasMaxContainerResource()) return false;
if (hasMaxContainerResource()) {
if (!getMaxContainerResource()
.equals(other.getMaxContainerResource())) return false;
}
if (hasMinContainerResource() != other.hasMinContainerResource()) return false;
if (hasMinContainerResource()) {
if (!getMinContainerResource()
.equals(other.getMinContainerResource())) return false;
}
if (hasIncrContainerResource() != other.hasIncrContainerResource()) return false;
if (hasIncrContainerResource()) {
if (!getIncrContainerResource()
.equals(other.getIncrContainerResource())) return false;
}
if (hasContainerTokenExpiryInterval() != other.hasContainerTokenExpiryInterval()) return false;
if (hasContainerTokenExpiryInterval()) {
if (getContainerTokenExpiryInterval()
!= other.getContainerTokenExpiryInterval()) return false;
}
if (hasContainerIdStart() != other.hasContainerIdStart()) return false;
if (hasContainerIdStart()) {
if (getContainerIdStart()
!= other.getContainerIdStart()) return false;
}
if (!getNodesForSchedulingList()
.equals(other.getNodesForSchedulingList())) return false;
// Unknown fields participate in equality as well.
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 doubles as the "not computed" sentinel, consistent with the
// equals() implementation above (only set fields contribute).
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasRegisterResponse()) {
hash = (37 * hash) + REGISTER_RESPONSE_FIELD_NUMBER;
hash = (53 * hash) + getRegisterResponse().hashCode();
}
if (hasMaxContainerResource()) {
hash = (37 * hash) + MAX_CONTAINER_RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getMaxContainerResource().hashCode();
}
if (hasMinContainerResource()) {
hash = (37 * hash) + MIN_CONTAINER_RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getMinContainerResource().hashCode();
}
if (hasIncrContainerResource()) {
hash = (37 * hash) + INCR_CONTAINER_RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getIncrContainerResource().hashCode();
}
if (hasContainerTokenExpiryInterval()) {
hash = (37 * hash) + CONTAINER_TOKEN_EXPIRY_INTERVAL_FIELD_NUMBER;
hash = (53 * hash) + getContainerTokenExpiryInterval();
}
if (hasContainerIdStart()) {
hash = (37 * hash) + CONTAINER_ID_START_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getContainerIdStart());
}
if (getNodesForSchedulingCount() > 0) {
hash = (37 * hash) + NODES_FOR_SCHEDULING_FIELD_NUMBER;
hash = (53 * hash) + getNodesForSchedulingList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. The stream variants funnel through
// GeneratedMessageV3 helpers that translate IOExceptions appropriately;
// parseDelimitedFrom reads a varint length prefix before the message.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom pass when this is the default instance — there is
// nothing to copy into the fresh builder.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RegisterDistributedSchedulingAMResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RegisterDistributedSchedulingAMResponseProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProtoOrBuilder {
// Builder-side descriptor/reflection plumbing; mirrors the message class.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterDistributedSchedulingAMResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterDistributedSchedulingAMResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// When the runtime forces eager field builders (alwaysUseFieldBuilders),
// pre-create the nested builders for every sub-message/repeated field so
// later access never races lazy initialization.
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getRegisterResponseFieldBuilder();
getMaxContainerResourceFieldBuilder();
getMinContainerResourceFieldBuilder();
getIncrContainerResourceFieldBuilder();
getNodesForSchedulingFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset every presence bit, then null out each sub-message field and
// dispose its nested builder (if one was created) so stale state cannot
// leak into the next build.
bitField0_ = 0;
registerResponse_ = null;
if (registerResponseBuilder_ != null) {
registerResponseBuilder_.dispose();
registerResponseBuilder_ = null;
}
maxContainerResource_ = null;
if (maxContainerResourceBuilder_ != null) {
maxContainerResourceBuilder_.dispose();
maxContainerResourceBuilder_ = null;
}
minContainerResource_ = null;
if (minContainerResourceBuilder_ != null) {
minContainerResourceBuilder_.dispose();
minContainerResourceBuilder_ = null;
}
incrContainerResource_ = null;
if (incrContainerResourceBuilder_ != null) {
incrContainerResourceBuilder_.dispose();
incrContainerResourceBuilder_ = null;
}
containerTokenExpiryInterval_ = 0;
containerIdStart_ = 0L;
// Repeated field: either drop back to the shared empty list or clear the
// repeated-field builder, depending on which representation is active.
if (nodesForSchedulingBuilder_ == null) {
nodesForScheduling_ = java.util.Collections.emptyList();
} else {
nodesForScheduling_ = null;
nodesForSchedulingBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000040);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterDistributedSchedulingAMResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto build() {
// Strict build: throws UninitializedMessageException (via the helper) if
// any set sub-message is missing required fields; use buildPartial() to
// skip that check.
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Transfers the repeated nodes_for_scheduling field into the result. When no
// repeated-field builder exists, the builder's list is frozen (made
// unmodifiable) once — bit 0x00000040 marks "builder owns a mutable list" —
// and then shared with the built message.
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto result) {
if (nodesForSchedulingBuilder_ == null) {
if (((bitField0_ & 0x00000040) != 0)) {
nodesForScheduling_ = java.util.Collections.unmodifiableList(nodesForScheduling_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.nodesForScheduling_ = nodesForScheduling_;
} else {
result.nodesForScheduling_ = nodesForSchedulingBuilder_.build();
}
}
// Copies the six optional singular fields (bits 0x01-0x20) from the builder
// into the result, translating each set builder bit into the corresponding
// presence bit on the built message. Sub-message fields come from the nested
// builder when one exists, otherwise from the raw field.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.registerResponse_ = registerResponseBuilder_ == null
? registerResponse_
: registerResponseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.maxContainerResource_ = maxContainerResourceBuilder_ == null
? maxContainerResource_
: maxContainerResourceBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.minContainerResource_ = minContainerResourceBuilder_ == null
? minContainerResource_
: minContainerResourceBuilder_.build();
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.incrContainerResource_ = incrContainerResourceBuilder_ == null
? incrContainerResource_
: incrContainerResourceBuilder_.build();
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.containerTokenExpiryInterval_ = containerTokenExpiryInterval_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.containerIdStart_ = containerIdStart_;
to_bitField0_ |= 0x00000020;
}
// OR rather than assign: the ctor may already have set bits on result.
result.bitField0_ |= to_bitField0_;
}
// Boilerplate overrides that delegate to GeneratedMessageV3.Builder; emitted
// by protoc to pin the covariant Builder return type for fluent chaining.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic-dispatch merge: routes to the type-specific overload when the other
// message is the same generated type, otherwise falls back to reflective
// field-by-field merging in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges all set fields of 'other' into this builder: singular message fields
// are recursively merged, scalars are overwritten, and the repeated
// nodes_for_scheduling list is concatenated. The default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto.getDefaultInstance()) return this;
if (other.hasRegisterResponse()) {
mergeRegisterResponse(other.getRegisterResponse());
}
if (other.hasMaxContainerResource()) {
mergeMaxContainerResource(other.getMaxContainerResource());
}
if (other.hasMinContainerResource()) {
mergeMinContainerResource(other.getMinContainerResource());
}
if (other.hasIncrContainerResource()) {
mergeIncrContainerResource(other.getIncrContainerResource());
}
if (other.hasContainerTokenExpiryInterval()) {
setContainerTokenExpiryInterval(other.getContainerTokenExpiryInterval());
}
if (other.hasContainerIdStart()) {
setContainerIdStart(other.getContainerIdStart());
}
// Repeated field: when this builder's list is empty we can alias other's
// (immutable) list directly and defer copying until a mutation occurs.
if (nodesForSchedulingBuilder_ == null) {
if (!other.nodesForScheduling_.isEmpty()) {
if (nodesForScheduling_.isEmpty()) {
nodesForScheduling_ = other.nodesForScheduling_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.addAll(other.nodesForScheduling_);
}
onChanged();
}
} else {
if (!other.nodesForScheduling_.isEmpty()) {
if (nodesForSchedulingBuilder_.isEmpty()) {
// Builder is empty: drop it and alias other's list; recreate the field
// builder only when alwaysUseFieldBuilders is enabled (test mode).
nodesForSchedulingBuilder_.dispose();
nodesForSchedulingBuilder_ = null;
nodesForScheduling_ = other.nodesForScheduling_;
bitField0_ = (bitField0_ & ~0x00000040);
nodesForSchedulingBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodesForSchedulingFieldBuilder() : null;
} else {
nodesForSchedulingBuilder_.addAllMessages(other.nodesForScheduling_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// True when every set message field is itself fully initialized. All fields
// of this message are optional, so only set sub-messages are checked.
@java.lang.Override
public final boolean isInitialized() {
if (hasRegisterResponse()) {
if (!getRegisterResponse().isInitialized()) {
return false;
}
}
if (hasMaxContainerResource()) {
if (!getMaxContainerResource().isInitialized()) {
return false;
}
}
if (hasMinContainerResource()) {
if (!getMinContainerResource().isInitialized()) {
return false;
}
}
if (hasIncrContainerResource()) {
if (!getIncrContainerResource().isInitialized()) {
return false;
}
}
return true;
}
// Parses wire-format bytes from the stream and merges each recognized field
// into this builder. The case labels are wire-format tags
// (field_number << 3 | wire_type); unknown tags are preserved via
// parseUnknownField. onChanged() runs even on parse failure so partially
// merged state is propagated to parent builders.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getRegisterResponseFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getMaxContainerResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
input.readMessage(
getMinContainerResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
input.readMessage(
getIncrContainerResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000008;
break;
} // case 34
case 40: {
containerTokenExpiryInterval_ = input.readInt32();
bitField0_ |= 0x00000010;
break;
} // case 40
case 48: {
containerIdStart_ = input.readInt64();
bitField0_ |= 0x00000020;
break;
} // case 48
case 58: {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.PARSER,
extensionRegistry);
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(m);
} else {
nodesForSchedulingBuilder_.addMessage(m);
}
break;
} // case 58
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Has-bits for the builder's fields; bit meanings documented at buildPartial0.
private int bitField0_;
// Singular message field register_response (field 1). Exactly one of
// registerResponse_ / registerResponseBuilder_ is authoritative at a time:
// the nested builder, once created lazily, takes over and the plain field is
// nulled out.
private org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto registerResponse_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder> registerResponseBuilder_;
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
* @return Whether the registerResponse field is set.
*/
public boolean hasRegisterResponse() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
* @return The registerResponse.
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getRegisterResponse() {
if (registerResponseBuilder_ == null) {
// Never returns null: falls back to the default instance when unset.
return registerResponse_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance() : registerResponse_;
} else {
return registerResponseBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
public Builder setRegisterResponse(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto value) {
if (registerResponseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
registerResponse_ = value;
} else {
registerResponseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
public Builder setRegisterResponse(
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder builderForValue) {
if (registerResponseBuilder_ == null) {
registerResponse_ = builderForValue.build();
} else {
registerResponseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
public Builder mergeRegisterResponse(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto value) {
if (registerResponseBuilder_ == null) {
// Merge only when an existing non-default value is present; otherwise
// simply adopt the incoming value.
if (((bitField0_ & 0x00000001) != 0) &&
registerResponse_ != null &&
registerResponse_ != org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance()) {
getRegisterResponseBuilder().mergeFrom(value);
} else {
registerResponse_ = value;
}
} else {
registerResponseBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
public Builder clearRegisterResponse() {
bitField0_ = (bitField0_ & ~0x00000001);
registerResponse_ = null;
if (registerResponseBuilder_ != null) {
registerResponseBuilder_.dispose();
registerResponseBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder getRegisterResponseBuilder() {
// Marks the field as set: handing out a mutable builder implies use.
bitField0_ |= 0x00000001;
onChanged();
return getRegisterResponseFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder getRegisterResponseOrBuilder() {
if (registerResponseBuilder_ != null) {
return registerResponseBuilder_.getMessageOrBuilder();
} else {
return registerResponse_ == null ?
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance() : registerResponse_;
}
}
/**
* optional .hadoop.yarn.RegisterApplicationMasterResponseProto register_response = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder>
getRegisterResponseFieldBuilder() {
// Lazily creates the nested field builder, seeding it with the current
// value; from then on registerResponse_ is unused (set to null).
if (registerResponseBuilder_ == null) {
registerResponseBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder>(
getRegisterResponse(),
getParentForChildren(),
isClean());
registerResponse_ = null;
}
return registerResponseBuilder_;
}
// Singular message field max_container_resource (field 2, has-bit 0x02).
// Same lazy-builder pattern as register_response: the plain field holds the
// value until a nested builder is created, which then becomes authoritative.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maxContainerResource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> maxContainerResourceBuilder_;
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
* @return Whether the maxContainerResource field is set.
*/
public boolean hasMaxContainerResource() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
* @return The maxContainerResource.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaxContainerResource() {
if (maxContainerResourceBuilder_ == null) {
// Never returns null: default instance stands in for an unset field.
return maxContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maxContainerResource_;
} else {
return maxContainerResourceBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
public Builder setMaxContainerResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (maxContainerResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
maxContainerResource_ = value;
} else {
maxContainerResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
public Builder setMaxContainerResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (maxContainerResourceBuilder_ == null) {
maxContainerResource_ = builderForValue.build();
} else {
maxContainerResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
public Builder mergeMaxContainerResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (maxContainerResourceBuilder_ == null) {
// Merge into an existing non-default value; otherwise adopt 'value'.
if (((bitField0_ & 0x00000002) != 0) &&
maxContainerResource_ != null &&
maxContainerResource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getMaxContainerResourceBuilder().mergeFrom(value);
} else {
maxContainerResource_ = value;
}
} else {
maxContainerResourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
public Builder clearMaxContainerResource() {
bitField0_ = (bitField0_ & ~0x00000002);
maxContainerResource_ = null;
if (maxContainerResourceBuilder_ != null) {
maxContainerResourceBuilder_.dispose();
maxContainerResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getMaxContainerResourceBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getMaxContainerResourceFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaxContainerResourceOrBuilder() {
if (maxContainerResourceBuilder_ != null) {
return maxContainerResourceBuilder_.getMessageOrBuilder();
} else {
return maxContainerResource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maxContainerResource_;
}
}
/**
* optional .hadoop.yarn.ResourceProto max_container_resource = 2;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getMaxContainerResourceFieldBuilder() {
// Lazy creation; afterwards the nested builder owns the value.
if (maxContainerResourceBuilder_ == null) {
maxContainerResourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getMaxContainerResource(),
getParentForChildren(),
isClean());
maxContainerResource_ = null;
}
return maxContainerResourceBuilder_;
}
// Singular message field min_container_resource (field 3, has-bit 0x04).
// Structure mirrors max_container_resource above.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto minContainerResource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> minContainerResourceBuilder_;
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
* @return Whether the minContainerResource field is set.
*/
public boolean hasMinContainerResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
* @return The minContainerResource.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMinContainerResource() {
if (minContainerResourceBuilder_ == null) {
return minContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : minContainerResource_;
} else {
return minContainerResourceBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
public Builder setMinContainerResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (minContainerResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
minContainerResource_ = value;
} else {
minContainerResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
public Builder setMinContainerResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (minContainerResourceBuilder_ == null) {
minContainerResource_ = builderForValue.build();
} else {
minContainerResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
public Builder mergeMinContainerResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (minContainerResourceBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
minContainerResource_ != null &&
minContainerResource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getMinContainerResourceBuilder().mergeFrom(value);
} else {
minContainerResource_ = value;
}
} else {
minContainerResourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
public Builder clearMinContainerResource() {
bitField0_ = (bitField0_ & ~0x00000004);
minContainerResource_ = null;
if (minContainerResourceBuilder_ != null) {
minContainerResourceBuilder_.dispose();
minContainerResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getMinContainerResourceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getMinContainerResourceFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMinContainerResourceOrBuilder() {
if (minContainerResourceBuilder_ != null) {
return minContainerResourceBuilder_.getMessageOrBuilder();
} else {
return minContainerResource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : minContainerResource_;
}
}
/**
* optional .hadoop.yarn.ResourceProto min_container_resource = 3;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getMinContainerResourceFieldBuilder() {
if (minContainerResourceBuilder_ == null) {
minContainerResourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getMinContainerResource(),
getParentForChildren(),
isClean());
minContainerResource_ = null;
}
return minContainerResourceBuilder_;
}
// Singular message field incr_container_resource (field 4, has-bit 0x08).
// Structure mirrors max_container_resource above.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto incrContainerResource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> incrContainerResourceBuilder_;
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
* @return Whether the incrContainerResource field is set.
*/
public boolean hasIncrContainerResource() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
* @return The incrContainerResource.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getIncrContainerResource() {
if (incrContainerResourceBuilder_ == null) {
return incrContainerResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : incrContainerResource_;
} else {
return incrContainerResourceBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
public Builder setIncrContainerResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (incrContainerResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
incrContainerResource_ = value;
} else {
incrContainerResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
public Builder setIncrContainerResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (incrContainerResourceBuilder_ == null) {
incrContainerResource_ = builderForValue.build();
} else {
incrContainerResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
public Builder mergeIncrContainerResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (incrContainerResourceBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0) &&
incrContainerResource_ != null &&
incrContainerResource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getIncrContainerResourceBuilder().mergeFrom(value);
} else {
incrContainerResource_ = value;
}
} else {
incrContainerResourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
public Builder clearIncrContainerResource() {
bitField0_ = (bitField0_ & ~0x00000008);
incrContainerResource_ = null;
if (incrContainerResourceBuilder_ != null) {
incrContainerResourceBuilder_.dispose();
incrContainerResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getIncrContainerResourceBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getIncrContainerResourceFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getIncrContainerResourceOrBuilder() {
if (incrContainerResourceBuilder_ != null) {
return incrContainerResourceBuilder_.getMessageOrBuilder();
} else {
return incrContainerResource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : incrContainerResource_;
}
}
/**
* optional .hadoop.yarn.ResourceProto incr_container_resource = 4;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getIncrContainerResourceFieldBuilder() {
if (incrContainerResourceBuilder_ == null) {
incrContainerResourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getIncrContainerResource(),
getParentForChildren(),
isClean());
incrContainerResource_ = null;
}
return incrContainerResourceBuilder_;
}
// Scalar int32 field container_token_expiry_interval (field 5, has-bit 0x10).
private int containerTokenExpiryInterval_ ;
/**
* optional int32 container_token_expiry_interval = 5;
* @return Whether the containerTokenExpiryInterval field is set.
*/
@java.lang.Override
public boolean hasContainerTokenExpiryInterval() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional int32 container_token_expiry_interval = 5;
* @return The containerTokenExpiryInterval.
*/
@java.lang.Override
public int getContainerTokenExpiryInterval() {
return containerTokenExpiryInterval_;
}
/**
* optional int32 container_token_expiry_interval = 5;
* @param value The containerTokenExpiryInterval to set.
* @return This builder for chaining.
*/
public Builder setContainerTokenExpiryInterval(int value) {
containerTokenExpiryInterval_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* optional int32 container_token_expiry_interval = 5;
* @return This builder for chaining.
*/
public Builder clearContainerTokenExpiryInterval() {
// Clearing resets both the has-bit and the value to the proto default (0).
bitField0_ = (bitField0_ & ~0x00000010);
containerTokenExpiryInterval_ = 0;
onChanged();
return this;
}
// Scalar int64 field container_id_start (field 6, has-bit 0x20).
private long containerIdStart_ ;
/**
* optional int64 container_id_start = 6;
* @return Whether the containerIdStart field is set.
*/
@java.lang.Override
public boolean hasContainerIdStart() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional int64 container_id_start = 6;
* @return The containerIdStart.
*/
@java.lang.Override
public long getContainerIdStart() {
return containerIdStart_;
}
/**
* optional int64 container_id_start = 6;
* @param value The containerIdStart to set.
* @return This builder for chaining.
*/
public Builder setContainerIdStart(long value) {
containerIdStart_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
* optional int64 container_id_start = 6;
* @return This builder for chaining.
*/
public Builder clearContainerIdStart() {
// Clearing resets both the has-bit and the value to the proto default (0L).
bitField0_ = (bitField0_ & ~0x00000020);
containerIdStart_ = 0L;
onChanged();
return this;
}
// Backing state for the repeated nodes_for_scheduling field (field 7).
// Fix: the generic type arguments were stripped from this file (raw
// java.util.List / java.util.ArrayList), which makes the typed getters below
// uncompilable; protoc emits List<RemoteNodeProto> here, restored.
private java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto> nodesForScheduling_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x40 means this builder owns a private mutable
// list; otherwise the list may be shared/immutable and must be copied before
// the first mutation.
private void ensureNodesForSchedulingIsMutable() {
  if (!((bitField0_ & 0x00000040) != 0)) {
    nodesForScheduling_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto>(nodesForScheduling_);
    bitField0_ |= 0x00000040;
  }
}
// Lazily-created repeated-field builder; once non-null it is authoritative
// over nodesForScheduling_.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder> nodesForSchedulingBuilder_;
/**
 * <code>repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;</code>
 *
 * Returns an unmodifiable view of the current list (or the field builder's
 * message list once a builder exists). Fix: the return type's generic
 * argument was stripped from this file (raw java.util.List); protoc emits
 * List&lt;RemoteNodeProto&gt;, restored.
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto> getNodesForSchedulingList() {
  if (nodesForSchedulingBuilder_ == null) {
    return java.util.Collections.unmodifiableList(nodesForScheduling_);
  } else {
    return nodesForSchedulingBuilder_.getMessageList();
  }
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
// Element count, from the list or the field builder, whichever is active.
public int getNodesForSchedulingCount() {
if (nodesForSchedulingBuilder_ == null) {
return nodesForScheduling_.size();
} else {
return nodesForSchedulingBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
// Element at 'index'; throws IndexOutOfBoundsException for invalid indices.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getNodesForScheduling(int index) {
if (nodesForSchedulingBuilder_ == null) {
return nodesForScheduling_.get(index);
} else {
return nodesForSchedulingBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
// Set/add mutators for nodes_for_scheduling. Each routes to the field
// builder when one exists, otherwise makes the list mutable
// (ensureNodesForSchedulingIsMutable) and edits it directly; message-valued
// setters reject null.
public Builder setNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto value) {
if (nodesForSchedulingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.set(index, value);
onChanged();
} else {
nodesForSchedulingBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public Builder setNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder builderForValue) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.set(index, builderForValue.build());
onChanged();
} else {
nodesForSchedulingBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public Builder addNodesForScheduling(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto value) {
if (nodesForSchedulingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(value);
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public Builder addNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto value) {
if (nodesForSchedulingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(index, value);
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public Builder addNodesForScheduling(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder builderForValue) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(builderForValue.build());
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public Builder addNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder builderForValue) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(index, builderForValue.build());
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
public Builder addAllNodesForScheduling(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto> values) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodesForScheduling_);
onChanged();
} else {
nodesForSchedulingBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
// Empties the repeated field; also clears the copy-on-write ownership bit so
// a fresh mutable list is created on the next mutation.
public Builder clearNodesForScheduling() {
if (nodesForSchedulingBuilder_ == null) {
nodesForScheduling_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
onChanged();
} else {
nodesForSchedulingBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
// Removes the element at 'index'; throws IndexOutOfBoundsException if invalid.
public Builder removeNodesForScheduling(int index) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.remove(index);
onChanged();
} else {
nodesForSchedulingBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder getNodesForSchedulingBuilder(
int index) {
return getNodesForSchedulingFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder getNodesForSchedulingOrBuilder(
int index) {
if (nodesForSchedulingBuilder_ == null) {
return nodesForScheduling_.get(index); } else {
return nodesForSchedulingBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingOrBuilderList() {
if (nodesForSchedulingBuilder_ != null) {
return nodesForSchedulingBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodesForScheduling_);
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
// Appends a default-instance element and returns its mutable sub-builder.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder addNodesForSchedulingBuilder() {
return getNodesForSchedulingFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;
*/
// Same, but inserts the new default-instance element at the given index.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder addNodesForSchedulingBuilder(
int index) {
return getNodesForSchedulingFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.getDefaultInstance());
}
/**
* <code>repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 7;</code>
*/
// The element type of the returned List had been stripped by HTML extraction
// (raw `java.util.List`); restored to the Builder list type protoc emits.
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder>
getNodesForSchedulingBuilderList() {
return getNodesForSchedulingFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for nodes_for_scheduling.  Once
// created, the builder takes ownership of the current list (the local
// reference is nulled) and all subsequent access goes through it.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingFieldBuilder() {
if (nodesForSchedulingBuilder_ == null) {
nodesForSchedulingBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>(
nodesForScheduling_,
((bitField0_ & 0x00000040) != 0),
getParentForChildren(),
isClean());
nodesForScheduling_ = null;
}
return nodesForSchedulingBuilder_;
}
// Standard generated delegations for unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RegisterDistributedSchedulingAMResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RegisterDistributedSchedulingAMResponseProto)
// Shared immutable default instance for RegisterDistributedSchedulingAMResponseProto.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated in favor of parser().  The type arguments on Parser and
// AbstractParser had been stripped by HTML extraction (raw types do not
// match protoc output and break getParserForType's covariant return);
// restored below.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RegisterDistributedSchedulingAMResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RegisterDistributedSchedulingAMResponseProto>() {
@java.lang.Override
public RegisterDistributedSchedulingAMResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
// Attach whatever was parsed so far to the exception.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RegisterDistributedSchedulingAMResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RegisterDistributedSchedulingAMResponseProto> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterDistributedSchedulingAMResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface DistributedSchedulingAllocateResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DistributedSchedulingAllocateResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
* @return Whether the allocateResponse field is set.
*/
boolean hasAllocateResponse();
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
* @return The allocateResponse.
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getAllocateResponse();
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder getAllocateResponseOrBuilder();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
java.util.List
getNodesForSchedulingList();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getNodesForScheduling(int index);
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
int getNodesForSchedulingCount();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingOrBuilderList();
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder getNodesForSchedulingOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.DistributedSchedulingAllocateResponseProto}
*/
public static final class DistributedSchedulingAllocateResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DistributedSchedulingAllocateResponseProto)
DistributedSchedulingAllocateResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DistributedSchedulingAllocateResponseProto.newBuilder() to construct.
// The builder parameter's wildcard type argument had been stripped by HTML
// extraction (`Builder>`); restored to `Builder<?>` as protoc emits.
private DistributedSchedulingAllocateResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance; repeated field starts empty.
private DistributedSchedulingAllocateResponseProto() {
nodesForScheduling_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DistributedSchedulingAllocateResponseProto();
}
// Static descriptor / reflection plumbing generated by protoc.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto.Builder.class);
}
// Presence bits; bit 0x1 tracks allocate_response.
private int bitField0_;
public static final int ALLOCATE_RESPONSE_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto allocateResponse_;
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
* @return Whether the allocateResponse field is set.
*/
@java.lang.Override
public boolean hasAllocateResponse() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
* @return The allocateResponse.
*/
// Returns the default instance rather than null when the field is unset.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getAllocateResponse() {
return allocateResponse_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance() : allocateResponse_;
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder getAllocateResponseOrBuilder() {
return allocateResponse_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance() : allocateResponse_;
}
public static final int NODES_FOR_SCHEDULING_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List nodesForScheduling_;
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
@java.lang.Override
public java.util.List getNodesForSchedulingList() {
return nodesForScheduling_;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingOrBuilderList() {
return nodesForScheduling_;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
@java.lang.Override
public int getNodesForSchedulingCount() {
return nodesForScheduling_.size();
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getNodesForScheduling(int index) {
return nodesForScheduling_.get(index);
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder getNodesForSchedulingOrBuilder(
int index) {
return nodesForScheduling_.get(index);
}
// Cached initialization check: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// allocate_response is optional, but if present it must itself be initialized.
if (hasAllocateResponse()) {
if (!getAllocateResponse().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Wire serialization: field 1 (if present), then each repeated field-2 element,
// then any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAllocateResponse());
}
for (int i = 0; i < nodesForScheduling_.size(); i++) {
output.writeMessage(2, nodesForScheduling_.get(i));
}
getUnknownFields().writeTo(output);
}
// Size computation mirrors writeTo; result is memoized.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAllocateResponse());
}
for (int i = 0; i < nodesForScheduling_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, nodesForScheduling_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over both fields plus unknown fields, as generated by protoc.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto) obj;
if (hasAllocateResponse() != other.hasAllocateResponse()) return false;
if (hasAllocateResponse()) {
if (!getAllocateResponse()
.equals(other.getAllocateResponse())) return false;
}
if (!getNodesForSchedulingList()
.equals(other.getNodesForSchedulingList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash combines the descriptor with each set field, keyed by field number;
// memoized (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAllocateResponse()) {
hash = (37 * hash) + ALLOCATE_RESPONSE_FIELD_NUMBER;
hash = (53 * hash) + getAllocateResponse().hashCode();
}
if (getNodesForSchedulingCount() > 0) {
hash = (37 * hash) + NODES_FOR_SCHEDULING_FIELD_NUMBER;
hash = (53 * hash) + getNodesForSchedulingList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: ByteBuffer / ByteString / byte[]
// overloads delegate to PARSER directly; stream overloads go through
// GeneratedMessageV3 helpers so IOExceptions propagate unchanged.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories.  toBuilder() returns a fresh Builder for the default
// instance (avoids merging an all-default message), otherwise a Builder
// pre-populated from this message.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DistributedSchedulingAllocateResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DistributedSchedulingAllocateResponseProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProtoOrBuilder {
// Builder-side descriptor / reflection plumbing.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is set in tests / certain runtimes).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAllocateResponseFieldBuilder();
getNodesForSchedulingFieldBuilder();
}
}
// Resets both fields and all presence bits (0x1 allocate_response,
// 0x2 nodes_for_scheduling).
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
allocateResponse_ = null;
if (allocateResponseBuilder_ != null) {
allocateResponseBuilder_.dispose();
allocateResponseBuilder_ = null;
}
if (nodesForSchedulingBuilder_ == null) {
nodesForScheduling_ = java.util.Collections.emptyList();
} else {
nodesForScheduling_ = null;
nodesForSchedulingBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Transfers the repeated field into the result.  When no field builder
// exists, the local list is frozen (made unmodifiable) and handed over,
// clearing the mutability bit so later builder edits copy-on-write.
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto result) {
if (nodesForSchedulingBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
nodesForScheduling_ = java.util.Collections.unmodifiableList(nodesForScheduling_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.nodesForScheduling_ = nodesForScheduling_;
} else {
result.nodesForScheduling_ = nodesForSchedulingBuilder_.build();
}
}
// Transfers the singular field and its presence bit into the result.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.allocateResponse_ = allocateResponseBuilder_ == null
? allocateResponse_
: allocateResponseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
// Standard generated delegations to GeneratedMessageV3.Builder for the
// reflective field API.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic-dispatch merge: uses the typed overload when possible.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge.  For the repeated field: when our side is empty we adopt the
// other message's (immutable) list by reference and clear the mutability
// bit; otherwise elements are appended (copy-on-write via ensure...Mutable).
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto.getDefaultInstance()) return this;
if (other.hasAllocateResponse()) {
mergeAllocateResponse(other.getAllocateResponse());
}
if (nodesForSchedulingBuilder_ == null) {
if (!other.nodesForScheduling_.isEmpty()) {
if (nodesForScheduling_.isEmpty()) {
nodesForScheduling_ = other.nodesForScheduling_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.addAll(other.nodesForScheduling_);
}
onChanged();
}
} else {
if (!other.nodesForScheduling_.isEmpty()) {
if (nodesForSchedulingBuilder_.isEmpty()) {
// Builder exists but is empty: cheaper to drop it, adopt the list,
// and recreate the builder only if the runtime forces field builders.
nodesForSchedulingBuilder_.dispose();
nodesForSchedulingBuilder_ = null;
nodesForScheduling_ = other.nodesForScheduling_;
bitField0_ = (bitField0_ & ~0x00000002);
nodesForSchedulingBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodesForSchedulingFieldBuilder() : null;
} else {
nodesForSchedulingBuilder_.addAllMessages(other.nodesForScheduling_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Mirrors the message-side check: allocate_response must be initialized if set.
@java.lang.Override
public final boolean isInitialized() {
if (hasAllocateResponse()) {
if (!getAllocateResponse().isInitialized()) {
return false;
}
}
return true;
}
// Streaming parse loop: dispatches on wire tag (10 = field 1 message,
// 18 = field 2 message); unknown tags go to parseUnknownField, which
// returns false on an end-group tag.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAllocateResponseFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.PARSER,
extensionRegistry);
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(m);
} else {
nodesForSchedulingBuilder_.addMessage(m);
}
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder-side presence bits: 0x1 allocate_response, 0x2 nodes_for_scheduling mutable.
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto allocateResponse_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder> allocateResponseBuilder_;
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
* @return Whether the allocateResponse field is set.
*/
public boolean hasAllocateResponse() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
* @return The allocateResponse.
*/
// Never returns null: falls back to the default instance when unset.
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getAllocateResponse() {
if (allocateResponseBuilder_ == null) {
return allocateResponse_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance() : allocateResponse_;
} else {
return allocateResponseBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
// Sets the field from a message; null is rejected when we store it locally
// (the field builder performs its own null check).
public Builder setAllocateResponse(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto value) {
if (allocateResponseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
allocateResponse_ = value;
} else {
allocateResponseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
// Sets the field from a sub-builder (built immediately).
public Builder setAllocateResponse(
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder builderForValue) {
if (allocateResponseBuilder_ == null) {
allocateResponse_ = builderForValue.build();
} else {
allocateResponseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
// Proto-merge semantics: if a non-default value is already present, merge
// field-by-field into it; otherwise replace wholesale.
public Builder mergeAllocateResponse(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto value) {
if (allocateResponseBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
allocateResponse_ != null &&
allocateResponse_ != org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance()) {
getAllocateResponseBuilder().mergeFrom(value);
} else {
allocateResponse_ = value;
}
} else {
allocateResponseBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
// Clears the field, its presence bit, and disposes any nested builder.
public Builder clearAllocateResponse() {
bitField0_ = (bitField0_ & ~0x00000001);
allocateResponse_ = null;
if (allocateResponseBuilder_ != null) {
allocateResponseBuilder_.dispose();
allocateResponseBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
// Marks the field present and hands out a mutable sub-builder
// (forces field-builder creation).
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder getAllocateResponseBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAllocateResponseFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder getAllocateResponseOrBuilder() {
if (allocateResponseBuilder_ != null) {
return allocateResponseBuilder_.getMessageOrBuilder();
} else {
return allocateResponse_ == null ?
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance() : allocateResponse_;
}
}
/**
* optional .hadoop.yarn.AllocateResponseProto allocate_response = 1;
*/
// Lazily creates the SingleFieldBuilderV3; afterwards the builder owns the
// value and the local reference is nulled.
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder>
getAllocateResponseFieldBuilder() {
if (allocateResponseBuilder_ == null) {
allocateResponseBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder>(
getAllocateResponse(),
getParentForChildren(),
isClean());
allocateResponse_ = null;
}
return allocateResponseBuilder_;
}
// Backing list for repeated field nodes_for_scheduling (field 2). Starts as the
// shared immutable empty list; bit 0x2 of bitField0_ records whether the list
// has been copied into a private mutable ArrayList.
// NOTE: restored the generic type parameters that were stripped from the
// generated source (raw List/ArrayList would not compile against the accessors).
private java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto> nodesForScheduling_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: replace the shared list with a private mutable copy
// before the first mutation.
private void ensureNodesForSchedulingIsMutable() {
  if (!((bitField0_ & 0x00000002) != 0)) {
    nodesForScheduling_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto>(nodesForScheduling_);
    bitField0_ |= 0x00000002;
  }
}
// Repeated-field builder for nodes_for_scheduling; null until builder-based
// access is first requested, after which it owns the element list.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder> nodesForSchedulingBuilder_;
/**
 * repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
 */
// Unmodifiable view of the current elements (restored generic return type that
// was stripped from the generated source).
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto> getNodesForSchedulingList() {
  if (nodesForSchedulingBuilder_ == null) {
    return java.util.Collections.unmodifiableList(nodesForScheduling_);
  } else {
    return nodesForSchedulingBuilder_.getMessageList();
  }
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Number of elements, delegating to the field builder once one exists.
public int getNodesForSchedulingCount() {
if (nodesForSchedulingBuilder_ == null) {
return nodesForScheduling_.size();
} else {
return nodesForSchedulingBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Element accessor by index.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto getNodesForScheduling(int index) {
if (nodesForSchedulingBuilder_ == null) {
return nodesForScheduling_.get(index);
} else {
return nodesForSchedulingBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Replaces the element at index; null is rejected explicitly.
public Builder setNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto value) {
if (nodesForSchedulingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.set(index, value);
onChanged();
} else {
nodesForSchedulingBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Replaces the element at index with the built value of the given builder.
public Builder setNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder builderForValue) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.set(index, builderForValue.build());
onChanged();
} else {
nodesForSchedulingBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Appends one element; null is rejected explicitly.
public Builder addNodesForScheduling(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto value) {
if (nodesForSchedulingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(value);
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Inserts one element at index; null is rejected explicitly.
public Builder addNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto value) {
if (nodesForSchedulingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(index, value);
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Appends the built value of the given builder.
public Builder addNodesForScheduling(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder builderForValue) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(builderForValue.build());
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Inserts the built value of the given builder at index.
public Builder addNodesForScheduling(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder builderForValue) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.add(index, builderForValue.build());
onChanged();
} else {
nodesForSchedulingBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
public Builder addAllNodesForScheduling(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto> values) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodesForScheduling_);
onChanged();
} else {
nodesForSchedulingBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Resets the repeated field to empty and clears the mutability bit.
public Builder clearNodesForScheduling() {
if (nodesForSchedulingBuilder_ == null) {
nodesForScheduling_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
nodesForSchedulingBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Removes the element at index.
public Builder removeNodesForScheduling(int index) {
if (nodesForSchedulingBuilder_ == null) {
ensureNodesForSchedulingIsMutable();
nodesForScheduling_.remove(index);
onChanged();
} else {
nodesForSchedulingBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Mutable builder for the element at index; forces creation of the field builder.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder getNodesForSchedulingBuilder(
int index) {
return getNodesForSchedulingFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Read-only view of the element at index.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder getNodesForSchedulingOrBuilder(
int index) {
if (nodesForSchedulingBuilder_ == null) {
return nodesForScheduling_.get(index); } else {
return nodesForSchedulingBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingOrBuilderList() {
if (nodesForSchedulingBuilder_ != null) {
return nodesForSchedulingBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodesForScheduling_);
}
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Appends a new default-valued element and returns its builder.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder addNodesForSchedulingBuilder() {
return getNodesForSchedulingFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
*/
// Inserts a new default-valued element at index and returns its builder.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder addNodesForSchedulingBuilder(
int index) {
return getNodesForSchedulingFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.getDefaultInstance());
}
/**
 * repeated .hadoop.yarn.RemoteNodeProto nodes_for_scheduling = 2;
 */
// Builder view of all elements; forces creation of the field builder.
// Restored the generic return type that was stripped from the generated source.
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder>
    getNodesForSchedulingBuilderList() {
  return getNodesForSchedulingFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder; once created it owns the element
// list (seeded with the current list and mutability flag) and the plain list
// reference is nulled out.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>
getNodesForSchedulingFieldBuilder() {
if (nodesForSchedulingBuilder_ == null) {
nodesForSchedulingBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RemoteNodeProtoOrBuilder>(
nodesForScheduling_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
nodesForScheduling_ = null;
}
return nodesForSchedulingBuilder_;
}
// Standard generated pass-throughs for unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DistributedSchedulingAllocateResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DistributedSchedulingAllocateResponseProto)
// Singleton default instance shared by all callers of getDefaultInstance().
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser. Deprecated in favor of parser(); kept public for
// generated-code compatibility. Restored the Parser/AbstractParser type
// parameters that were stripped from the generated source.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DistributedSchedulingAllocateResponseProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DistributedSchedulingAllocateResponseProto>() {
  @java.lang.Override
  public DistributedSchedulingAllocateResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach the partially-parsed message so callers can inspect it.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DistributedSchedulingAllocateResponseProto> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DistributedSchedulingAllocateResponseProto> getParserForType() {
  return PARSER;
}
// Returns the shared default instance for this message type.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface DistributedSchedulingAllocateRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.DistributedSchedulingAllocateRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
* @return Whether the allocateRequest field is set.
*/
boolean hasAllocateRequest();
/**
* optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
* @return The allocateRequest.
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getAllocateRequest();
/**
* optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder getAllocateRequestOrBuilder();
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
java.util.List
getAllocatedContainersList();
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index);
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
int getAllocatedContainersCount();
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getAllocatedContainersOrBuilderList();
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.DistributedSchedulingAllocateRequestProto}
*/
public static final class DistributedSchedulingAllocateRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.DistributedSchedulingAllocateRequestProto)
DistributedSchedulingAllocateRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DistributedSchedulingAllocateRequestProto.newBuilder() to construct.
// Restored the wildcard builder parameter ("Builder<?>") that was stripped
// from the generated source.
private DistributedSchedulingAllocateRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used by newInstance(); repeated field starts empty.
private DistributedSchedulingAllocateRequestProto() {
  allocatedContainers_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new DistributedSchedulingAllocateRequestProto();
}
// Descriptor plumbing generated for reflection-based access.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto.Builder.class);
}
// Presence bits for optional fields (bit 0x1 = allocate_request).
private int bitField0_;
public static final int ALLOCATE_REQUEST_FIELD_NUMBER = 1;
// Cached message for field 1; null means unset.
private org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto allocateRequest_;
/**
* optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
* @return Whether the allocateRequest field is set.
*/
@java.lang.Override
public boolean hasAllocateRequest() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
* @return The allocateRequest.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getAllocateRequest() {
return allocateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance() : allocateRequest_;
}
/**
* optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder getAllocateRequestOrBuilder() {
return allocateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance() : allocateRequest_;
}
public static final int ALLOCATED_CONTAINERS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List allocatedContainers_;
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
@java.lang.Override
public java.util.List getAllocatedContainersList() {
return allocatedContainers_;
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getAllocatedContainersOrBuilderList() {
return allocatedContainers_;
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
// Element count for field 2.
@java.lang.Override
public int getAllocatedContainersCount() {
return allocatedContainers_.size();
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
// Element accessor by index.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index) {
return allocatedContainers_.get(index);
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
// Read-only view of the element at index (same backing object).
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder(
int index) {
return allocatedContainers_.get(index);
}
// Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// Initialized iff the optional nested request and every repeated container
// are themselves initialized; result is cached.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasAllocateRequest()) {
if (!getAllocateRequest().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getAllocatedContainersCount(); i++) {
if (!getAllocatedContainers(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes field 1 (if present), then each repeated element, then any
// unknown fields, in tag order.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAllocateRequest());
}
for (int i = 0; i < allocatedContainers_.size(); i++) {
output.writeMessage(2, allocatedContainers_.get(i));
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size; mirrors writeTo.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAllocateRequest());
}
for (int i = 0; i < allocatedContainers_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, allocatedContainers_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality: field presence, field values, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto) obj;
if (hasAllocateRequest() != other.hasAllocateRequest()) return false;
if (hasAllocateRequest()) {
if (!getAllocateRequest()
.equals(other.getAllocateRequest())) return false;
}
if (!getAllocatedContainersList()
.equals(other.getAllocatedContainersList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals; folds in set fields by field number.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAllocateRequest()) {
hash = (37 * hash) + ALLOCATE_REQUEST_FIELD_NUMBER;
hash = (53 * hash) + getAllocateRequest().hashCode();
}
if (getAllocatedContainersCount() > 0) {
hash = (37 * hash) + ALLOCATED_CONTAINERS_FIELD_NUMBER;
hash = (53 * hash) + getAllocatedContainersList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads, all delegating
// to PARSER or the GeneratedMessageV3 IOException-wrapping helpers.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() avoids a merge for the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.DistributedSchedulingAllocateRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.DistributedSchedulingAllocateRequestProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProtoOrBuilder {
// Descriptor plumbing and constructors for the request Builder.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is set in tests/reflection scenarios).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAllocateRequestFieldBuilder();
getAllocatedContainersFieldBuilder();
}
}
// Resets both fields: disposes the single-field builder for field 1 and
// empties (or clears the builder of) the repeated field 2.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
allocateRequest_ = null;
if (allocateRequestBuilder_ != null) {
allocateRequestBuilder_.dispose();
allocateRequestBuilder_ = null;
}
if (allocatedContainersBuilder_ == null) {
allocatedContainers_ = java.util.Collections.emptyList();
} else {
allocatedContainers_ = null;
allocatedContainersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_DistributedSchedulingAllocateRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Transfers the repeated field: freezes the builder's list (making it
// unmodifiable) or builds it from the field builder.
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto result) {
if (allocatedContainersBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
allocatedContainers_ = java.util.Collections.unmodifiableList(allocatedContainers_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.allocatedContainers_ = allocatedContainers_;
} else {
result.allocatedContainers_ = allocatedContainersBuilder_.build();
}
}
// Transfers the optional field and its presence bit to the built message.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.allocateRequest_ = allocateRequestBuilder_ == null
? allocateRequest_
: allocateRequestBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
// Standard generated pass-throughs to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic merge: dispatches to the typed overload when possible.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: merges the optional nested request, then the repeated field.
// When this builder has no elements yet it adopts the other message's
// (immutable) list directly instead of copying.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto.getDefaultInstance()) return this;
if (other.hasAllocateRequest()) {
mergeAllocateRequest(other.getAllocateRequest());
}
if (allocatedContainersBuilder_ == null) {
if (!other.allocatedContainers_.isEmpty()) {
if (allocatedContainers_.isEmpty()) {
allocatedContainers_ = other.allocatedContainers_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureAllocatedContainersIsMutable();
allocatedContainers_.addAll(other.allocatedContainers_);
}
onChanged();
}
} else {
if (!other.allocatedContainers_.isEmpty()) {
if (allocatedContainersBuilder_.isEmpty()) {
allocatedContainersBuilder_.dispose();
allocatedContainersBuilder_ = null;
allocatedContainers_ = other.allocatedContainers_;
bitField0_ = (bitField0_ & ~0x00000002);
allocatedContainersBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAllocatedContainersFieldBuilder() : null;
} else {
allocatedContainersBuilder_.addAllMessages(other.allocatedContainers_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Initialized iff the nested request (when set) and all repeated containers
// are initialized; unlike the message, the builder does not memoize.
@java.lang.Override
public final boolean isInitialized() {
if (hasAllocateRequest()) {
if (!getAllocateRequest().isInitialized()) {
return false;
}
}
for (int i = 0; i < getAllocatedContainersCount(); i++) {
if (!getAllocatedContainers(i).isInitialized()) {
return false;
}
}
return true;
}
// Wire-format merge loop: reads tags until EOF (tag 0) or an end-group tag,
// dispatching field 1 (tag 10) and repeated field 2 (tag 18) and routing
// unrecognized tags to the unknown-field set.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAllocateRequestFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER,
extensionRegistry);
if (allocatedContainersBuilder_ == null) {
ensureAllocatedContainersIsMutable();
allocatedContainers_.add(m);
} else {
allocatedContainersBuilder_.addMessage(m);
}
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Presence bits: 0x1 = allocate_request set, 0x2 = allocatedContainers_ list is owned/mutable.
private int bitField0_;
// Singular message field 1; either stored directly or delegated to allocateRequestBuilder_.
private org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto allocateRequest_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder> allocateRequestBuilder_;
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * @return Whether the allocateRequest field is set.
 */
public boolean hasAllocateRequest() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * @return The allocateRequest; never null (default instance when unset).
 */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getAllocateRequest() {
if (allocateRequestBuilder_ == null) {
return allocateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance() : allocateRequest_;
} else {
return allocateRequestBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * Sets the field to {@code value}; rejects null.
 */
public Builder setAllocateRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto value) {
if (allocateRequestBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
allocateRequest_ = value;
} else {
allocateRequestBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * Sets the field from a sub-builder (built immediately).
 */
public Builder setAllocateRequest(
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder builderForValue) {
if (allocateRequestBuilder_ == null) {
allocateRequest_ = builderForValue.build();
} else {
allocateRequestBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * Proto-merges {@code value} into the current field value (field-wise merge
 * when already set; plain assignment otherwise).
 */
public Builder mergeAllocateRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto value) {
if (allocateRequestBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
allocateRequest_ != null &&
allocateRequest_ != org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance()) {
getAllocateRequestBuilder().mergeFrom(value);
} else {
allocateRequest_ = value;
}
} else {
allocateRequestBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * Clears the field and disposes any active sub-builder.
 */
public Builder clearAllocateRequest() {
bitField0_ = (bitField0_ & ~0x00000001);
allocateRequest_ = null;
if (allocateRequestBuilder_ != null) {
allocateRequestBuilder_.dispose();
allocateRequestBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * Returns a mutable sub-builder for in-place editing; marks the field set.
 */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder getAllocateRequestBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAllocateRequestFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder getAllocateRequestOrBuilder() {
if (allocateRequestBuilder_ != null) {
return allocateRequestBuilder_.getMessageOrBuilder();
} else {
return allocateRequest_ == null ?
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance() : allocateRequest_;
}
}
/**
 * optional .hadoop.yarn.AllocateRequestProto allocate_request = 1;
 * Lazily creates the SingleFieldBuilderV3; after creation the plain field
 * reference is nulled and all access goes through the builder.
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder>
getAllocateRequestFieldBuilder() {
if (allocateRequestBuilder_ == null) {
allocateRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder>(
getAllocateRequest(),
getParentForChildren(),
isClean());
allocateRequest_ = null;
}
return allocateRequestBuilder_;
}
// Backing list for repeated field 2 (allocated_containers). Starts as the
// shared immutable empty list; swapped for a private ArrayList on first write.
// NOTE: the generic type parameters below were lost in extraction and have
// been restored to the signatures protoc emits.
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> allocatedContainers_ =
java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000002 of bitField0_ records that this builder
// owns a mutable copy of the list.
private void ensureAllocatedContainersIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
allocatedContainers_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>(allocatedContainers_);
bitField0_ |= 0x00000002;
}
}
// Repeated-field builder for allocated_containers; null until first builder access.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> allocatedContainersBuilder_;
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * @return an unmodifiable view of the current elements.
 * (Generic return type restored; it was stripped in extraction.)
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getAllocatedContainersList() {
if (allocatedContainersBuilder_ == null) {
return java.util.Collections.unmodifiableList(allocatedContainers_);
} else {
return allocatedContainersBuilder_.getMessageList();
}
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * @return the number of elements currently held.
 */
public int getAllocatedContainersCount() {
if (allocatedContainersBuilder_ == null) {
return allocatedContainers_.size();
} else {
return allocatedContainersBuilder_.getCount();
}
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * @return the element at {@code index}.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index) {
if (allocatedContainersBuilder_ == null) {
return allocatedContainers_.get(index);
} else {
return allocatedContainersBuilder_.getMessage(index);
}
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Replaces the element at {@code index}; rejects null.
 */
public Builder setAllocatedContainers(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (allocatedContainersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAllocatedContainersIsMutable();
allocatedContainers_.set(index, value);
onChanged();
} else {
allocatedContainersBuilder_.setMessage(index, value);
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Replaces the element at {@code index} with {@code builderForValue.build()}.
 */
public Builder setAllocatedContainers(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (allocatedContainersBuilder_ == null) {
ensureAllocatedContainersIsMutable();
allocatedContainers_.set(index, builderForValue.build());
onChanged();
} else {
allocatedContainersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Appends {@code value}; rejects null.
 */
public Builder addAllocatedContainers(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (allocatedContainersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAllocatedContainersIsMutable();
allocatedContainers_.add(value);
onChanged();
} else {
allocatedContainersBuilder_.addMessage(value);
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Inserts {@code value} at {@code index}; rejects null.
 */
public Builder addAllocatedContainers(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (allocatedContainersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAllocatedContainersIsMutable();
allocatedContainers_.add(index, value);
onChanged();
} else {
allocatedContainersBuilder_.addMessage(index, value);
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Appends {@code builderForValue.build()}.
 */
public Builder addAllocatedContainers(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (allocatedContainersBuilder_ == null) {
ensureAllocatedContainersIsMutable();
allocatedContainers_.add(builderForValue.build());
onChanged();
} else {
allocatedContainersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Inserts {@code builderForValue.build()} at {@code index}.
 */
public Builder addAllocatedContainers(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (allocatedContainersBuilder_ == null) {
ensureAllocatedContainersIsMutable();
allocatedContainers_.add(index, builderForValue.build());
onChanged();
} else {
allocatedContainersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
public Builder addAllAllocatedContainers(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> values) {
if (allocatedContainersBuilder_ == null) {
ensureAllocatedContainersIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, allocatedContainers_);
onChanged();
} else {
allocatedContainersBuilder_.addAllMessages(values);
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Resets the field to the shared empty list and clears the ownership bit.
 */
public Builder clearAllocatedContainers() {
if (allocatedContainersBuilder_ == null) {
allocatedContainers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
allocatedContainersBuilder_.clear();
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Removes the element at {@code index}.
 */
public Builder removeAllocatedContainers(int index) {
if (allocatedContainersBuilder_ == null) {
ensureAllocatedContainersIsMutable();
allocatedContainers_.remove(index);
onChanged();
} else {
allocatedContainersBuilder_.remove(index);
}
return this;
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 * Returns a mutable sub-builder for the element at {@code index}
 * (forces creation of the repeated-field builder).
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getAllocatedContainersBuilder(
int index) {
return getAllocatedContainersFieldBuilder().getBuilder(index);
}
/**
 * repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder(
int index) {
if (allocatedContainersBuilder_ == null) {
return allocatedContainers_.get(index); } else {
return allocatedContainersBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getAllocatedContainersOrBuilderList() {
if (allocatedContainersBuilder_ != null) {
return allocatedContainersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(allocatedContainers_);
}
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addAllocatedContainersBuilder() {
return getAllocatedContainersFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addAllocatedContainersBuilder(
int index) {
return getAllocatedContainersFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ContainerProto allocated_containers = 2;
*/
public java.util.List
getAllocatedContainersBuilderList() {
return getAllocatedContainersFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for allocated_containers; after
// creation the plain list reference is nulled and all access goes through it.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getAllocatedContainersFieldBuilder() {
if (allocatedContainersBuilder_ == null) {
allocatedContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
allocatedContainers_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
allocatedContainers_ = null;
}
return allocatedContainersBuilder_;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.DistributedSchedulingAllocateRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.DistributedSchedulingAllocateRequestProto)
// Singleton default (all-fields-unset) instance, created eagerly at class load.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser. Deprecated in favor of parser()/getParserForType().
// On failure the partially-built message is attached to the thrown
// InvalidProtocolBufferException. (The Parser/AbstractParser type arguments
// were stripped in extraction and have been restored.)
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DistributedSchedulingAllocateRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DistributedSchedulingAllocateRequestProto>() {
@java.lang.Override
public DistributedSchedulingAllocateRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Preferred accessors for the message parser. (The Parser type arguments were
// stripped in extraction and have been restored to the protoc-emitted forms.)
public static org.apache.hadoop.thirdparty.protobuf.Parser<DistributedSchedulingAllocateRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DistributedSchedulingAllocateRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.DistributedSchedulingAllocateRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface NodeLabelsProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeLabelsProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
java.util.List
getNodeLabelsList();
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index);
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
int getNodeLabelsCount();
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
getNodeLabelsOrBuilderList();
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.NodeLabelsProto}
*/
public static final class NodeLabelsProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodeLabelsProto)
NodeLabelsProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeLabelsProto.newBuilder() to construct.
// (The Builder<?> wildcard below was stripped in extraction and restored.)
private NodeLabelsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance: empty repeated field.
private NodeLabelsProto() {
nodeLabels_ = java.util.Collections.emptyList();
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NodeLabelsProto();
}
// Static and instance descriptor plumbing wired to the file-level descriptor tables.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeLabelsProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeLabelsProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder.class);
}
public static final int NODELABELS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List nodeLabels_;
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
@java.lang.Override
public java.util.List getNodeLabelsList() {
return nodeLabels_;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
getNodeLabelsOrBuilderList() {
return nodeLabels_;
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 */
@java.lang.Override
public int getNodeLabelsCount() {
return nodeLabels_.size();
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) {
return nodeLabels_.get(index);
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
int index) {
return nodeLabels_.get(index);
}
// Memoized tri-state: -1 unknown, 0 false, 1 true. This message has no
// required fields, so it is always initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes each nodeLabels element as field 1, then any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < nodeLabels_.size(); i++) {
output.writeMessage(1, nodeLabels_.get(i));
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < nodeLabels_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, nodeLabels_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over the nodeLabels list and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto) obj;
if (!getNodeLabelsList()
.equals(other.getNodeLabelsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash code consistent with equals; memoized (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getNodeLabelsCount() > 0) {
hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabelsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an extension registry; plus length-delimited stream variants.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() avoids a merge for the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodeLabelsProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeLabelsProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder {
// Descriptor plumbing for the NodeLabelsProto builder.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeLabelsProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeLabelsProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields (and any active repeated-field builder) to defaults.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (nodeLabelsBuilder_ == null) {
nodeLabels_ = java.util.Collections.emptyList();
} else {
nodeLabels_ = null;
nodeLabelsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeLabelsProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance();
}
// build() enforces initialization; buildPartial() does not.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Freezes the repeated field into the result (unmodifiable list, or built from
// the field builder) so the message is immutable after construction.
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto result) {
if (nodeLabelsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
nodeLabels_ = java.util.Collections.unmodifiableList(nodeLabels_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.nodeLabels_ = nodeLabels_;
} else {
result.nodeLabels_ = nodeLabelsBuilder_.build();
}
}
// No singular fields on this message; generated hook is intentionally empty.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto result) {
int from_bitField0_ = bitField0_;
}
// Reflection-based field mutation is delegated unchanged to the generated superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, else the generic reflective merge.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: concatenates other's nodeLabels (adopting other's immutable
// list when ours is empty) and merges unknown fields.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance()) return this;
if (nodeLabelsBuilder_ == null) {
if (!other.nodeLabels_.isEmpty()) {
if (nodeLabels_.isEmpty()) {
nodeLabels_ = other.nodeLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNodeLabelsIsMutable();
nodeLabels_.addAll(other.nodeLabels_);
}
onChanged();
}
} else {
if (!other.nodeLabels_.isEmpty()) {
if (nodeLabelsBuilder_.isEmpty()) {
// Builder holds nothing: drop it and adopt other's list wholesale,
// re-creating the builder only if the runtime forces field builders.
nodeLabelsBuilder_.dispose();
nodeLabelsBuilder_ = null;
nodeLabels_ = other.nodeLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
nodeLabelsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodeLabelsFieldBuilder() : null;
} else {
nodeLabelsBuilder_.addAllMessages(other.nodeLabels_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// No required fields anywhere in this message type.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire format; field 1 (nodeLabels) elements are
// appended one at a time, unknown fields are preserved.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.PARSER,
extensionRegistry);
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.add(m);
} else {
nodeLabelsBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
// Backing storage for the repeated nodeLabels field. Starts as the shared
// immutable empty list and is replaced by a private mutable copy on first
// mutation (copy-on-write), tracked via bit 0 of bitField0_.
// NOTE(review): generic type arguments restored — they were stripped by the
// HTML extraction this file came from.
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> nodeLabels_ =
  java.util.Collections.emptyList();
// Ensures nodeLabels_ is a privately-owned mutable list before any in-place
// mutation; bit 0 of bitField0_ records that the copy has been made.
private void ensureNodeLabelsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    nodeLabels_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto>(nodeLabels_);
    bitField0_ |= 0x00000001;
  }
}
// Lazily-created helper that manages nested builders for the repeated field;
// while non-null it is the authoritative storage and nodeLabels_ is unused.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> nodeLabelsBuilder_;
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 *
 * @return an unmodifiable view of the current label list. Generic return
 *     type restored (stripped by HTML extraction).
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> getNodeLabelsList() {
  if (nodeLabelsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(nodeLabels_);
  } else {
    return nodeLabelsBuilder_.getMessageList();
  }
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 *
 * @return number of nodeLabels elements.
 */
public int getNodeLabelsCount() {
  if (nodeLabelsBuilder_ == null) {
    return nodeLabels_.size();
  } else {
    return nodeLabelsBuilder_.getCount();
  }
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 *
 * @param index position of the element to return.
 * @return the element at {@code index}.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) {
  if (nodeLabelsBuilder_ == null) {
    return nodeLabels_.get(index);
  } else {
    return nodeLabelsBuilder_.getMessage(index);
  }
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 *
 * Replaces the element at {@code index} with {@code value}.
 */
public Builder setNodeLabels(
    int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
  // When the field builder is active, it owns the storage — delegate directly.
  if (nodeLabelsBuilder_ != null) {
    nodeLabelsBuilder_.setMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureNodeLabelsIsMutable();
  nodeLabels_.set(index, value);
  onChanged();
  return this;
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 *
 * Replaces the element at {@code index} with the built form of
 * {@code builderForValue}.
 */
public Builder setNodeLabels(
    int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
  if (nodeLabelsBuilder_ != null) {
    nodeLabelsBuilder_.setMessage(index, builderForValue.build());
    return this;
  }
  ensureNodeLabelsIsMutable();
  nodeLabels_.set(index, builderForValue.build());
  onChanged();
  return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public Builder addNodeLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
if (nodeLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeLabelsIsMutable();
nodeLabels_.add(value);
onChanged();
} else {
nodeLabelsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public Builder addNodeLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) {
if (nodeLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeLabelsIsMutable();
nodeLabels_.add(index, value);
onChanged();
} else {
nodeLabelsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public Builder addNodeLabels(
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.add(builderForValue.build());
onChanged();
} else {
nodeLabelsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public Builder addNodeLabels(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
nodeLabels_.add(index, builderForValue.build());
onChanged();
} else {
nodeLabelsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public Builder addAllNodeLabels(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto> values) {
if (nodeLabelsBuilder_ == null) {
ensureNodeLabelsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeLabels_);
onChanged();
} else {
nodeLabelsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 *
 * Resets the field to empty.
 */
public Builder clearNodeLabels() {
  if (nodeLabelsBuilder_ != null) {
    nodeLabelsBuilder_.clear();
    return this;
  }
  // Drop the private copy and fall back to the shared empty list; clearing
  // bit 0 marks the storage as no longer privately owned.
  nodeLabels_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
 *
 * Removes the element at {@code index}.
 */
public Builder removeNodeLabels(int index) {
  if (nodeLabelsBuilder_ != null) {
    nodeLabelsBuilder_.remove(index);
    return this;
  }
  ensureNodeLabelsIsMutable();
  nodeLabels_.remove(index);
  onChanged();
  return this;
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder getNodeLabelsBuilder(
int index) {
return getNodeLabelsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder(
int index) {
if (nodeLabelsBuilder_ == null) {
return nodeLabels_.get(index); } else {
return nodeLabelsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
getNodeLabelsOrBuilderList() {
if (nodeLabelsBuilder_ != null) {
return nodeLabelsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodeLabels_);
}
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder() {
return getNodeLabelsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder(
int index) {
return getNodeLabelsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeLabelProto nodeLabels = 1;
*/
public java.util.List
getNodeLabelsBuilderList() {
return getNodeLabelsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 and hands ownership of the
// current list over to it; after this call nodeLabels_ is null and all
// accessors must go through the builder.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>
    getNodeLabelsFieldBuilder() {
  if (nodeLabelsBuilder_ == null) {
    nodeLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>(
            nodeLabels_,
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    nodeLabels_ = null;
  }
  return nodeLabelsBuilder_;
}
// Plain delegations to the superclass; declared final so subclasses of the
// generated builder cannot alter unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeLabelsProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeLabelsProto)
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated in favor of parser(); kept for backward compatibility.
// Generic type arguments restored — the extracted source had the raw
// Parser / AbstractParser, which would not override correctly.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelsProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeLabelsProto>() {
  @java.lang.Override
  public NodeLabelsProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect partial data.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/** Preferred accessor for this message's parser (generic type restored). */
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelsProto> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelsProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
public interface NodeAttributesProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeAttributesProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
java.util.List
getNodeAttributesList();
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index);
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
int getNodeAttributesCount();
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
getNodeAttributesOrBuilderList();
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.NodeAttributesProto}
*/
public static final class NodeAttributesProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodeAttributesProto)
NodeAttributesProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeAttributesProto.newBuilder() to construct.
// Wildcard restored — extracted source had the invalid "Builder> builder".
private NodeAttributesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; repeated field starts empty.
private NodeAttributesProto() {
  nodeAttributes_ = java.util.Collections.emptyList();
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new NodeAttributesProto();
}
// Descriptor for this message type, defined in
// yarn_server_common_service_protos.proto.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeAttributesProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeAttributesProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder.class);
}
public static final int NODEATTRIBUTES_FIELD_NUMBER = 1;
// Immutable element list for the repeated nodeAttributes field.
// Generic type argument restored (stripped by HTML extraction).
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> nodeAttributes_;
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
@java.lang.Override
public java.util.List getNodeAttributesList() {
return nodeAttributes_;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
getNodeAttributesOrBuilderList() {
return nodeAttributes_;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
@java.lang.Override
public int getNodeAttributesCount() {
return nodeAttributes_.size();
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) {
return nodeAttributes_.get(index);
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
int index) {
return nodeAttributes_.get(index);
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Valid only if every nested NodeAttributeProto element is itself valid.
  for (int i = 0; i < getNodeAttributesCount(); i++) {
    if (!getNodeAttributes(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes each element as field 1 followed by any unknown fields; order
// matters for the wire format and must not be changed.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  for (int i = 0; i < nodeAttributes_.size(); i++) {
    output.writeMessage(1, nodeAttributes_.get(i));
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size of this message.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < nodeAttributes_.size(); i++) {
    size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(1, nodeAttributes_.get(i));
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over the element list and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto) obj;
  if (!getNodeAttributesList()
      .equals(other.getNodeAttributesList())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash; the multiplier/field-number constants are part of the
// protobuf hashing scheme and must match other generated messages.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getNodeAttributesCount() > 0) {
    hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER;
    hash = (53 * hash) + getNodeAttributesList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to PARSER.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(byte[] data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    byte[] data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Builders are obtained by copying the default instance so field defaults
// come from one place.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless merge when converting the default instance itself.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodeAttributesProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeAttributesProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder {
// Descriptor and accessor-table plumbing, mirroring the outer message class.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeAttributesProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeAttributesProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.newBuilder()
private Builder() {
}
private Builder(
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
// Resets the builder to the default state, dropping any list copy or
// nested field builder contents.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (nodeAttributesBuilder_ == null) {
    nodeAttributes_ = java.util.Collections.emptyList();
  } else {
    nodeAttributes_ = null;
    nodeAttributesBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeAttributesProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getDefaultInstanceForType() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance();
}
// Builds the message, throwing if required invariants of nested messages
// are not satisfied.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto build() {
  org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Builds without the initialization check; used internally for error paths.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto buildPartial() {
  org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) { buildPartial0(result); }
  onBuilt();
  return result;
}
// Freezes the repeated field into the result: either an unmodifiable view of
// the builder's list, or the output of the nested field builder.
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto result) {
  if (nodeAttributesBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.nodeAttributes_ = nodeAttributes_;
  } else {
    result.nodeAttributes_ = nodeAttributesBuilder_.build();
  }
}
// This message has no singular fields, so there is nothing to copy here.
// (Removed the generated-but-unused local "from_bitField0_".)
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto result) {
}
// Reflection-style field mutators: plain delegations to the superclass,
// overridden only to narrow the return type to this Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
    int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when the other message is the same generated
// type; otherwise falls back to the generic descriptor-based merge.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto) {
    return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Typed merge: appends the other message's elements to this builder.
// The two branches mirror the builder's dual storage: a plain list, or a
// RepeatedFieldBuilderV3 once nested builders have been requested.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto other) {
  if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance()) return this;
  if (nodeAttributesBuilder_ == null) {
    if (!other.nodeAttributes_.isEmpty()) {
      if (nodeAttributes_.isEmpty()) {
        // Adopt the other message's immutable list directly (no copy);
        // clearing bit 0 records that we do not own it.
        nodeAttributes_ = other.nodeAttributes_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureNodeAttributesIsMutable();
        nodeAttributes_.addAll(other.nodeAttributes_);
      }
      onChanged();
    }
  } else {
    if (!other.nodeAttributes_.isEmpty()) {
      if (nodeAttributesBuilder_.isEmpty()) {
        // Empty field builder: discard it, adopt the other list, then
        // re-create the builder only if the runtime always uses builders.
        nodeAttributesBuilder_.dispose();
        nodeAttributesBuilder_ = null;
        nodeAttributes_ = other.nodeAttributes_;
        bitField0_ = (bitField0_ & ~0x00000001);
        nodeAttributesBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getNodeAttributesFieldBuilder() : null;
      } else {
        nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_);
      }
    }
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
// Valid only if every nested NodeAttributeProto element is itself valid.
@java.lang.Override
public final boolean isInitialized() {
  for (int i = 0; i < getNodeAttributesCount(); i++) {
    if (!getNodeAttributes(i).isInitialized()) {
      return false;
    }
  }
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    // Standard protobuf tag-dispatch parse loop: read one field tag per
    // iteration until end-of-stream (tag 0) or an end-group tag.
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: {
          // Field 1, wire type 2: one NodeAttributeProto element.
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.PARSER,
                  extensionRegistry);
          if (nodeAttributesBuilder_ == null) {
            ensureNodeAttributesIsMutable();
            nodeAttributes_.add(m);
          } else {
            nodeAttributesBuilder_.addMessage(m);
          }
          break;
        } // case 10
        default: {
          if (!super.parseUnknownField(input, extensionRegistry, tag)) {
            done = true; // was an endgroup tag
          }
          break;
        } // default:
      } // switch (tag)
    } // while (!done)
  } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Partial progress is kept even on failure; notify parent of mutation.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
// Backing storage for the repeated nodeAttributes field; copy-on-write with
// bit 0 of bitField0_ marking private ownership. Generic type arguments
// restored (stripped by HTML extraction).
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> nodeAttributes_ =
  java.util.Collections.emptyList();
// Ensures nodeAttributes_ is a privately-owned mutable list before mutation.
private void ensureNodeAttributesIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    nodeAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto>(nodeAttributes_);
    bitField0_ |= 0x00000001;
  }
}
// Lazily-created helper managing nested builders for the repeated field;
// while non-null it is the authoritative storage.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> nodeAttributesBuilder_;
/**
 * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
 *
 * @return an unmodifiable view of the current element list. Generic return
 *     type restored (stripped by HTML extraction).
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> getNodeAttributesList() {
  if (nodeAttributesBuilder_ == null) {
    return java.util.Collections.unmodifiableList(nodeAttributes_);
  } else {
    return nodeAttributesBuilder_.getMessageList();
  }
}
/**
 * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
 *
 * @return number of nodeAttributes elements.
 */
public int getNodeAttributesCount() {
  if (nodeAttributesBuilder_ == null) {
    return nodeAttributes_.size();
  } else {
    return nodeAttributesBuilder_.getCount();
  }
}
/**
 * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
 *
 * @param index position of the element to return.
 * @return the element at {@code index}.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) {
  if (nodeAttributesBuilder_ == null) {
    return nodeAttributes_.get(index);
  } else {
    return nodeAttributesBuilder_.getMessage(index);
  }
}
/**
 * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
 *
 * Replaces the element at {@code index} with {@code value}.
 */
public Builder setNodeAttributes(
    int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
  // When the field builder is active, it owns the storage — delegate directly.
  if (nodeAttributesBuilder_ != null) {
    nodeAttributesBuilder_.setMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureNodeAttributesIsMutable();
  nodeAttributes_.set(index, value);
  onChanged();
  return this;
}
/**
 * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
 *
 * Replaces the element at {@code index} with the built form of
 * {@code builderForValue}.
 */
public Builder setNodeAttributes(
    int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
  if (nodeAttributesBuilder_ != null) {
    nodeAttributesBuilder_.setMessage(index, builderForValue.build());
    return this;
  }
  ensureNodeAttributesIsMutable();
  nodeAttributes_.set(index, builderForValue.build());
  onChanged();
  return this;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public Builder addNodeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
if (nodeAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeAttributesIsMutable();
nodeAttributes_.add(value);
onChanged();
} else {
nodeAttributesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public Builder addNodeAttributes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
if (nodeAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNodeAttributesIsMutable();
nodeAttributes_.add(index, value);
onChanged();
} else {
nodeAttributesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public Builder addNodeAttributes(
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
if (nodeAttributesBuilder_ == null) {
ensureNodeAttributesIsMutable();
nodeAttributes_.add(builderForValue.build());
onChanged();
} else {
nodeAttributesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public Builder addNodeAttributes(
int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
if (nodeAttributesBuilder_ == null) {
ensureNodeAttributesIsMutable();
nodeAttributes_.add(index, builderForValue.build());
onChanged();
} else {
nodeAttributesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public Builder addAllNodeAttributes(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> values) {
if (nodeAttributesBuilder_ == null) {
ensureNodeAttributesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, nodeAttributes_);
onChanged();
} else {
nodeAttributesBuilder_.addAllMessages(values);
}
return this;
}
/**
 * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
 *
 * Resets the field to empty.
 */
public Builder clearNodeAttributes() {
  if (nodeAttributesBuilder_ != null) {
    nodeAttributesBuilder_.clear();
    return this;
  }
  // Drop the private copy and fall back to the shared empty list; clearing
  // bit 0 marks the storage as no longer privately owned.
  nodeAttributes_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
 *
 * Removes the element at {@code index}.
 */
public Builder removeNodeAttributes(int index) {
  if (nodeAttributesBuilder_ != null) {
    nodeAttributesBuilder_.remove(index);
    return this;
  }
  ensureNodeAttributesIsMutable();
  nodeAttributes_.remove(index);
  onChanged();
  return this;
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder getNodeAttributesBuilder(
int index) {
return getNodeAttributesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
int index) {
if (nodeAttributesBuilder_ == null) {
return nodeAttributes_.get(index); } else {
return nodeAttributesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
getNodeAttributesOrBuilderList() {
if (nodeAttributesBuilder_ != null) {
return nodeAttributesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nodeAttributes_);
}
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
// Appends a new default-valued element and returns its builder.
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder() {
return getNodeAttributesFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;
*/
// Inserts a new default-valued element at {@code index} and returns its
// builder.
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder(
int index) {
return getNodeAttributesFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 1;</code>
 */
// Returns builders for every element of the repeated field. Restored the
// generic return type (`List<NodeAttributeProto.Builder>`) that was lost
// in extraction (the raw `java.util.List` would not compile cleanly).
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder>
    getNodeAttributesBuilderList() {
  return getNodeAttributesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for nodeAttributes. After
// creation the plain list (nodeAttributes_) is nulled out: ownership of
// the elements moves into the field builder.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
getNodeAttributesFieldBuilder() {
if (nodeAttributesBuilder_ == null) {
nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>(
nodeAttributes_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
nodeAttributes_ = null;
}
return nodeAttributesBuilder_;
}
// Standard generated pass-throughs to GeneratedMessageV3.Builder for
// fields not defined in the .proto schema.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeAttributesProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeAttributesProto)
// Singleton default (all-fields-unset) instance of NodeAttributesProto,
// created eagerly in the static initializer.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser singleton for NodeAttributesProto. Restored the type arguments
// on Parser/AbstractParser that were lost in extraction. Deprecated in
// generated code in favor of the static parser() accessor.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributesProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeAttributesProto>() {
  @java.lang.Override
  public NodeAttributesProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect it.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      // Wrap plain I/O failures in the protobuf exception type, keeping
      // the cause and the partial message.
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
// Preferred accessor for the message parser. Restored the generic return
// type (`Parser<NodeAttributesProto>`) lost in extraction.
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributesProto> parser() {
  return PARSER;
}
// Instance-level parser accessor required by MessageLite. Restored the
// generic return type lost in extraction.
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributesProto> getParserForType() {
  return PARSER;
}
// Instance-level default-instance accessor required by MessageLite.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface RegisterNodeManagerRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RegisterNodeManagerRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return Whether the nodeId field is set.
*/
boolean hasNodeId();
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return The nodeId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();
/**
* optional int32 http_port = 3;
* @return Whether the httpPort field is set.
*/
boolean hasHttpPort();
/**
* optional int32 http_port = 3;
* @return The httpPort.
*/
int getHttpPort();
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
* @return Whether the resource field is set.
*/
boolean hasResource();
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
* @return The resource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();
/**
* optional string nm_version = 5;
* @return Whether the nmVersion field is set.
*/
boolean hasNmVersion();
/**
* optional string nm_version = 5;
* @return The nmVersion.
*/
java.lang.String getNmVersion();
/**
* optional string nm_version = 5;
* @return The bytes for nmVersion.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getNmVersionBytes();
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
java.util.List
getContainerStatusesList();
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto getContainerStatuses(int index);
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
int getContainerStatusesCount();
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder>
getContainerStatusesOrBuilderList();
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder getContainerStatusesOrBuilder(
int index);
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
java.util.List
getRunningApplicationsList();
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getRunningApplications(int index);
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
int getRunningApplicationsCount();
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getRunningApplicationsOrBuilderList();
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getRunningApplicationsOrBuilder(
int index);
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;
* @return Whether the nodeLabels field is set.
*/
boolean hasNodeLabels();
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;
* @return The nodeLabels.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getNodeLabels();
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder getNodeLabelsOrBuilder();
/**
* optional .hadoop.yarn.ResourceProto physicalResource = 9;
* @return Whether the physicalResource field is set.
*/
boolean hasPhysicalResource();
/**
* optional .hadoop.yarn.ResourceProto physicalResource = 9;
* @return The physicalResource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getPhysicalResource();
/**
* optional .hadoop.yarn.ResourceProto physicalResource = 9;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getPhysicalResourceOrBuilder();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
java.util.List
getLogAggregationReportsForAppsList();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getLogAggregationReportsForApps(int index);
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
int getLogAggregationReportsForAppsCount();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
getLogAggregationReportsForAppsOrBuilderList();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder getLogAggregationReportsForAppsOrBuilder(
int index);
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
* @return Whether the nodeAttributes field is set.
*/
boolean hasNodeAttributes();
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
* @return The nodeAttributes.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getNodeAttributes();
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder getNodeAttributesOrBuilder();
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
* @return Whether the nodeStatus field is set.
*/
boolean hasNodeStatus();
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
* @return The nodeStatus.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getNodeStatus();
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder getNodeStatusOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.RegisterNodeManagerRequestProto}
*/
public static final class RegisterNodeManagerRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RegisterNodeManagerRequestProto)
RegisterNodeManagerRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RegisterNodeManagerRequestProto.newBuilder() to construct.
// Restored the wildcard on the builder parameter (`Builder<?>`), which
// was lost in extraction; this is the standard protoc constructor shape.
private RegisterNodeManagerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: initializes string/repeated fields to their empty
// defaults. Used only for the default instance and by newInstance below.
private RegisterNodeManagerRequestProto() {
nmVersion_ = "";
containerStatuses_ = java.util.Collections.emptyList();
runningApplications_ = java.util.Collections.emptyList();
logAggregationReportsForApps_ = java.util.Collections.emptyList();
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RegisterNodeManagerRequestProto();
}
// Descriptor / field-accessor plumbing wired to the tables generated at
// the bottom of the outer YarnServerCommonServiceProtos class.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.Builder.class);
}
// Presence bits for the optional fields of this message; bit 0 = node_id.
private int bitField0_;
public static final int NODE_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return Whether the nodeId field is set.
*/
@java.lang.Override
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return The nodeId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
// Unset message fields read as the type's default instance, never null.
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
// optional int32 http_port = 3 (presence bit 1 of bitField0_).
public static final int HTTP_PORT_FIELD_NUMBER = 3;
private int httpPort_ = 0;
/**
* optional int32 http_port = 3;
* @return Whether the httpPort field is set.
*/
@java.lang.Override
public boolean hasHttpPort() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int32 http_port = 3;
* @return The httpPort.
*/
@java.lang.Override
public int getHttpPort() {
return httpPort_;
}
// optional .hadoop.yarn.ResourceProto resource = 4 (presence bit 2).
public static final int RESOURCE_FIELD_NUMBER = 4;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
* @return Whether the resource field is set.
*/
@java.lang.Override
public boolean hasResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
* @return The resource.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
// Unset message fields read as the default instance, never null.
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
// optional string nm_version = 5 (presence bit 3). The field holds either
// a String or a ByteString and is lazily converted/cached in each
// direction by the accessors below — the standard protoc string idiom.
public static final int NM_VERSION_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object nmVersion_ = "";
/**
* optional string nm_version = 5;
* @return Whether the nmVersion field is set.
*/
@java.lang.Override
public boolean hasNmVersion() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string nm_version = 5;
* @return The nmVersion.
*/
@java.lang.Override
public java.lang.String getNmVersion() {
java.lang.Object ref = nmVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
nmVersion_ = s;
}
return s;
}
}
/**
* optional string nm_version = 5;
* @return The bytes for nmVersion.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNmVersionBytes() {
java.lang.Object ref = nmVersion_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded form for subsequent byte reads.
nmVersion_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int CONTAINER_STATUSES_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private java.util.List containerStatuses_;
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
@java.lang.Override
public java.util.List getContainerStatusesList() {
return containerStatuses_;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder>
getContainerStatusesOrBuilderList() {
return containerStatuses_;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
@java.lang.Override
public int getContainerStatusesCount() {
return containerStatuses_.size();
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto getContainerStatuses(int index) {
return containerStatuses_.get(index);
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder getContainerStatusesOrBuilder(
int index) {
return containerStatuses_.get(index);
}
public static final int RUNNINGAPPLICATIONS_FIELD_NUMBER = 7;
@SuppressWarnings("serial")
private java.util.List runningApplications_;
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
@java.lang.Override
public java.util.List getRunningApplicationsList() {
return runningApplications_;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getRunningApplicationsOrBuilderList() {
return runningApplications_;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
@java.lang.Override
public int getRunningApplicationsCount() {
return runningApplications_.size();
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getRunningApplications(int index) {
return runningApplications_.get(index);
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getRunningApplicationsOrBuilder(
int index) {
return runningApplications_.get(index);
}
// optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8 (presence bit 4).
public static final int NODELABELS_FIELD_NUMBER = 8;
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto nodeLabels_;
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;
* @return Whether the nodeLabels field is set.
*/
@java.lang.Override
public boolean hasNodeLabels() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;
* @return The nodeLabels.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getNodeLabels() {
// Unset message fields read as the default instance, never null.
return nodeLabels_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
}
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder getNodeLabelsOrBuilder() {
return nodeLabels_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
}
// optional .hadoop.yarn.ResourceProto physicalResource = 9 (presence bit 5).
public static final int PHYSICALRESOURCE_FIELD_NUMBER = 9;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto physicalResource_;
/**
* optional .hadoop.yarn.ResourceProto physicalResource = 9;
* @return Whether the physicalResource field is set.
*/
@java.lang.Override
public boolean hasPhysicalResource() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto physicalResource = 9;
* @return The physicalResource.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getPhysicalResource() {
// Unset message fields read as the default instance, never null.
return physicalResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : physicalResource_;
}
/**
* optional .hadoop.yarn.ResourceProto physicalResource = 9;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getPhysicalResourceOrBuilder() {
return physicalResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : physicalResource_;
}
public static final int LOG_AGGREGATION_REPORTS_FOR_APPS_FIELD_NUMBER = 10;
@SuppressWarnings("serial")
private java.util.List logAggregationReportsForApps_;
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
@java.lang.Override
public java.util.List getLogAggregationReportsForAppsList() {
return logAggregationReportsForApps_;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
getLogAggregationReportsForAppsOrBuilderList() {
return logAggregationReportsForApps_;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
@java.lang.Override
public int getLogAggregationReportsForAppsCount() {
return logAggregationReportsForApps_.size();
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getLogAggregationReportsForApps(int index) {
return logAggregationReportsForApps_.get(index);
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder getLogAggregationReportsForAppsOrBuilder(
int index) {
return logAggregationReportsForApps_.get(index);
}
// optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11
// (presence bit 6).
public static final int NODEATTRIBUTES_FIELD_NUMBER = 11;
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto nodeAttributes_;
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
* @return Whether the nodeAttributes field is set.
*/
@java.lang.Override
public boolean hasNodeAttributes() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
* @return The nodeAttributes.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getNodeAttributes() {
// Unset message fields read as the default instance, never null.
return nodeAttributes_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
}
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder getNodeAttributesOrBuilder() {
return nodeAttributes_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
}
// optional .hadoop.yarn.NodeStatusProto nodeStatus = 12 (presence bit 7).
public static final int NODESTATUS_FIELD_NUMBER = 12;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto nodeStatus_;
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
* @return Whether the nodeStatus field is set.
*/
@java.lang.Override
public boolean hasNodeStatus() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
* @return The nodeStatus.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getNodeStatus() {
// Unset message fields read as the default instance, never null.
return nodeStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
}
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder getNodeStatusOrBuilder() {
return nodeStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
}
// Memoized initialization state: -1 = unknown, 0 = not initialized,
// 1 = initialized.
private byte memoizedIsInitialized = -1;
// Checks that every set/contained message field which itself declares
// required fields is fully initialized; the result is cached.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasResource()) {
if (!getResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getContainerStatusesCount(); i++) {
if (!getContainerStatuses(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasPhysicalResource()) {
if (!getPhysicalResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasNodeAttributes()) {
if (!getNodeAttributes().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasNodeStatus()) {
if (!getNodeStatus().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes all set fields in field-number order (1,3,4,5,6,7,8,9,10,
// 11,12), then any unknown fields carried through from parsing.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getNodeId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt32(3, httpPort_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(4, getResource());
}
if (((bitField0_ & 0x00000008) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, nmVersion_);
}
for (int i = 0; i < containerStatuses_.size(); i++) {
output.writeMessage(6, containerStatuses_.get(i));
}
for (int i = 0; i < runningApplications_.size(); i++) {
output.writeMessage(7, runningApplications_.get(i));
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeMessage(8, getNodeLabels());
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeMessage(9, getPhysicalResource());
}
for (int i = 0; i < logAggregationReportsForApps_.size(); i++) {
output.writeMessage(10, logAggregationReportsForApps_.get(i));
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeMessage(11, getNodeAttributes());
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeMessage(12, getNodeStatus());
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the exact wire size that
// writeTo will produce, summing each set field plus unknown fields.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getNodeId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(3, httpPort_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, getResource());
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, nmVersion_);
}
for (int i = 0; i < containerStatuses_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(6, containerStatuses_.get(i));
}
for (int i = 0; i < runningApplications_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(7, runningApplications_.get(i));
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(8, getNodeLabels());
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(9, getPhysicalResource());
}
for (int i = 0; i < logAggregationReportsForApps_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(10, logAggregationReportsForApps_.get(i));
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(11, getNodeAttributes());
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(12, getNodeStatus());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field equality: for each optional field, presence must match
// and, when present, values must be equal; repeated fields compare as
// lists; unknown fields must match too.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto) obj;
if (hasNodeId() != other.hasNodeId()) return false;
if (hasNodeId()) {
if (!getNodeId()
.equals(other.getNodeId())) return false;
}
if (hasHttpPort() != other.hasHttpPort()) return false;
if (hasHttpPort()) {
if (getHttpPort()
!= other.getHttpPort()) return false;
}
if (hasResource() != other.hasResource()) return false;
if (hasResource()) {
if (!getResource()
.equals(other.getResource())) return false;
}
if (hasNmVersion() != other.hasNmVersion()) return false;
if (hasNmVersion()) {
if (!getNmVersion()
.equals(other.getNmVersion())) return false;
}
if (!getContainerStatusesList()
.equals(other.getContainerStatusesList())) return false;
if (!getRunningApplicationsList()
.equals(other.getRunningApplicationsList())) return false;
if (hasNodeLabels() != other.hasNodeLabels()) return false;
if (hasNodeLabels()) {
if (!getNodeLabels()
.equals(other.getNodeLabels())) return false;
}
if (hasPhysicalResource() != other.hasPhysicalResource()) return false;
if (hasPhysicalResource()) {
if (!getPhysicalResource()
.equals(other.getPhysicalResource())) return false;
}
if (!getLogAggregationReportsForAppsList()
.equals(other.getLogAggregationReportsForAppsList())) return false;
if (hasNodeAttributes() != other.hasNodeAttributes()) return false;
if (hasNodeAttributes()) {
if (!getNodeAttributes()
.equals(other.getNodeAttributes())) return false;
}
if (hasNodeStatus() != other.hasNodeStatus()) return false;
if (hasNodeStatus()) {
if (!getNodeStatus()
.equals(other.getNodeStatus())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals: folds in the field number and
// value hash of every set/non-empty field using the generated 37/53
// multipliers, plus descriptor and unknown-field hashes.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNodeId()) {
hash = (37 * hash) + NODE_ID_FIELD_NUMBER;
hash = (53 * hash) + getNodeId().hashCode();
}
if (hasHttpPort()) {
hash = (37 * hash) + HTTP_PORT_FIELD_NUMBER;
hash = (53 * hash) + getHttpPort();
}
if (hasResource()) {
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
}
if (hasNmVersion()) {
hash = (37 * hash) + NM_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getNmVersion().hashCode();
}
if (getContainerStatusesCount() > 0) {
hash = (37 * hash) + CONTAINER_STATUSES_FIELD_NUMBER;
hash = (53 * hash) + getContainerStatusesList().hashCode();
}
if (getRunningApplicationsCount() > 0) {
hash = (37 * hash) + RUNNINGAPPLICATIONS_FIELD_NUMBER;
hash = (53 * hash) + getRunningApplicationsList().hashCode();
}
if (hasNodeLabels()) {
hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabels().hashCode();
}
if (hasPhysicalResource()) {
hash = (37 * hash) + PHYSICALRESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getPhysicalResource().hashCode();
}
if (getLogAggregationReportsForAppsCount() > 0) {
hash = (37 * hash) + LOG_AGGREGATION_REPORTS_FOR_APPS_FIELD_NUMBER;
hash = (53 * hash) + getLogAggregationReportsForAppsList().hashCode();
}
if (hasNodeAttributes()) {
hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getNodeAttributes().hashCode();
}
if (hasNodeStatus()) {
hash = (37 * hash) + NODESTATUS_FIELD_NUMBER;
hash = (53 * hash) + getNodeStatus().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points delegating to PARSER for the supported input
// kinds (ByteBuffer, ByteString, byte[], InputStream), each with and
// without an extension registry.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream variant goes through GeneratedMessageV3 so plain IOExceptions
// propagate instead of being wrapped.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods (standard protoc output).
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with all set fields of {@code prototype}.
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; anything else is
// copied field-by-field via mergeFrom.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RegisterNodeManagerRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RegisterNodeManagerRequestProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProtoOrBuilder {
// Descriptor plumbing: exposes the compiled .proto descriptor for this
// message and the reflection accessor table shared by message and builder.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// When alwaysUseFieldBuilders is on (used for testing nested-builder
// code paths), eagerly create every sub-message/repeated field builder
// instead of the default lazy initialization.
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getNodeIdFieldBuilder();
getResourceFieldBuilder();
getContainerStatusesFieldBuilder();
getRunningApplicationsFieldBuilder();
getNodeLabelsFieldBuilder();
getPhysicalResourceFieldBuilder();
getLogAggregationReportsForAppsFieldBuilder();
getNodeAttributesFieldBuilder();
getNodeStatusFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
// Resets every field to its default and clears all presence bits.
// Singular message fields: null the cached value and dispose any
// nested builder. Repeated fields: either reinstall the immutable
// empty list (no builder) or clear the RepeatedFieldBuilderV3, then
// drop the corresponding mutability bit from bitField0_.
super.clear();
bitField0_ = 0;
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
httpPort_ = 0;
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
nmVersion_ = "";
if (containerStatusesBuilder_ == null) {
containerStatuses_ = java.util.Collections.emptyList();
} else {
containerStatuses_ = null;
containerStatusesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
if (runningApplicationsBuilder_ == null) {
runningApplications_ = java.util.Collections.emptyList();
} else {
runningApplications_ = null;
runningApplicationsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
nodeLabels_ = null;
if (nodeLabelsBuilder_ != null) {
nodeLabelsBuilder_.dispose();
nodeLabelsBuilder_ = null;
}
physicalResource_ = null;
if (physicalResourceBuilder_ != null) {
physicalResourceBuilder_.dispose();
physicalResourceBuilder_ = null;
}
if (logAggregationReportsForAppsBuilder_ == null) {
logAggregationReportsForApps_ = java.util.Collections.emptyList();
} else {
logAggregationReportsForApps_ = null;
logAggregationReportsForAppsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
nodeAttributes_ = null;
if (nodeAttributesBuilder_ != null) {
nodeAttributesBuilder_.dispose();
nodeAttributesBuilder_ = null;
}
nodeStatus_ = null;
if (nodeStatusBuilder_ != null) {
nodeStatusBuilder_.dispose();
nodeStatusBuilder_ = null;
}
return this;
}
// Reflection support: the descriptor and the immutable default instance
// for the message type this builder produces.
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto build() {
// Like buildPartial() but verifies all required fields (including
// those of nested messages) are set; throws if not.
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto buildPartial() {
// Copies builder state into a new message without the initialization
// check: repeated fields first, then singular fields guarded by the
// presence bits in bitField0_.
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto result) {
// Transfers the three repeated fields into the message. Without a
// nested builder, the builder's mutable list is frozen in place
// (wrapped unmodifiable, mutability bit cleared) and shared with the
// message; with a builder, the builder produces the immutable list.
if (containerStatusesBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0)) {
containerStatuses_ = java.util.Collections.unmodifiableList(containerStatuses_);
bitField0_ = (bitField0_ & ~0x00000010);
}
result.containerStatuses_ = containerStatuses_;
} else {
result.containerStatuses_ = containerStatusesBuilder_.build();
}
if (runningApplicationsBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
runningApplications_ = java.util.Collections.unmodifiableList(runningApplications_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.runningApplications_ = runningApplications_;
} else {
result.runningApplications_ = runningApplicationsBuilder_.build();
}
if (logAggregationReportsForAppsBuilder_ == null) {
if (((bitField0_ & 0x00000100) != 0)) {
logAggregationReportsForApps_ = java.util.Collections.unmodifiableList(logAggregationReportsForApps_);
bitField0_ = (bitField0_ & ~0x00000100);
}
result.logAggregationReportsForApps_ = logAggregationReportsForApps_;
} else {
result.logAggregationReportsForApps_ = logAggregationReportsForAppsBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto result) {
// Copies the singular fields that are marked present in the builder's
// bitField0_ into the message, remapping bit positions: the builder's
// bit layout includes the repeated-field mutability bits (0x10, 0x20,
// 0x100), while the message's has-bits are packed densely — e.g.
// builder bit 0x40 (node_labels) becomes message bit 0x10.
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.nodeId_ = nodeIdBuilder_ == null
? nodeId_
: nodeIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.httpPort_ = httpPort_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.resource_ = resourceBuilder_ == null
? resource_
: resourceBuilder_.build();
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.nmVersion_ = nmVersion_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.nodeLabels_ = nodeLabelsBuilder_ == null
? nodeLabels_
: nodeLabelsBuilder_.build();
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.physicalResource_ = physicalResourceBuilder_ == null
? physicalResource_
: physicalResourceBuilder_.build();
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000200) != 0)) {
result.nodeAttributes_ = nodeAttributesBuilder_ == null
? nodeAttributes_
: nodeAttributesBuilder_.build();
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000400) != 0)) {
result.nodeStatus_ = nodeStatusBuilder_ == null
? nodeStatus_
: nodeStatusBuilder_.build();
to_bitField0_ |= 0x00000080;
}
result.bitField0_ |= to_bitField0_;
}
// Reflection-based field mutators: plain delegations to the
// GeneratedMessageV3.Builder superclass, re-declared only to narrow the
// return type to this Builder for call chaining.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
// Dispatch: use the fast typed merge when possible, otherwise fall
// back to the reflective field-by-field merge in the superclass.
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto other) {
// Typed merge following proto2 semantics: singular message fields are
// recursively merged, scalar/string fields are overwritten if set in
// `other`, and repeated fields are concatenated (other's elements
// appended after this builder's).
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.getDefaultInstance()) return this;
if (other.hasNodeId()) {
mergeNodeId(other.getNodeId());
}
if (other.hasHttpPort()) {
setHttpPort(other.getHttpPort());
}
if (other.hasResource()) {
mergeResource(other.getResource());
}
if (other.hasNmVersion()) {
nmVersion_ = other.nmVersion_;
bitField0_ |= 0x00000008;
onChanged();
}
// Repeated-field merge: if this builder's list is still empty it can
// share other's immutable list directly; otherwise append. When a
// RepeatedFieldBuilderV3 exists but is empty, it is discarded so the
// immutable list can be adopted without copying.
if (containerStatusesBuilder_ == null) {
if (!other.containerStatuses_.isEmpty()) {
if (containerStatuses_.isEmpty()) {
containerStatuses_ = other.containerStatuses_;
bitField0_ = (bitField0_ & ~0x00000010);
} else {
ensureContainerStatusesIsMutable();
containerStatuses_.addAll(other.containerStatuses_);
}
onChanged();
}
} else {
if (!other.containerStatuses_.isEmpty()) {
if (containerStatusesBuilder_.isEmpty()) {
containerStatusesBuilder_.dispose();
containerStatusesBuilder_ = null;
containerStatuses_ = other.containerStatuses_;
bitField0_ = (bitField0_ & ~0x00000010);
containerStatusesBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getContainerStatusesFieldBuilder() : null;
} else {
containerStatusesBuilder_.addAllMessages(other.containerStatuses_);
}
}
}
if (runningApplicationsBuilder_ == null) {
if (!other.runningApplications_.isEmpty()) {
if (runningApplications_.isEmpty()) {
runningApplications_ = other.runningApplications_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureRunningApplicationsIsMutable();
runningApplications_.addAll(other.runningApplications_);
}
onChanged();
}
} else {
if (!other.runningApplications_.isEmpty()) {
if (runningApplicationsBuilder_.isEmpty()) {
runningApplicationsBuilder_.dispose();
runningApplicationsBuilder_ = null;
runningApplications_ = other.runningApplications_;
bitField0_ = (bitField0_ & ~0x00000020);
runningApplicationsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRunningApplicationsFieldBuilder() : null;
} else {
runningApplicationsBuilder_.addAllMessages(other.runningApplications_);
}
}
}
if (other.hasNodeLabels()) {
mergeNodeLabels(other.getNodeLabels());
}
if (other.hasPhysicalResource()) {
mergePhysicalResource(other.getPhysicalResource());
}
if (logAggregationReportsForAppsBuilder_ == null) {
if (!other.logAggregationReportsForApps_.isEmpty()) {
if (logAggregationReportsForApps_.isEmpty()) {
logAggregationReportsForApps_ = other.logAggregationReportsForApps_;
bitField0_ = (bitField0_ & ~0x00000100);
} else {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.addAll(other.logAggregationReportsForApps_);
}
onChanged();
}
} else {
if (!other.logAggregationReportsForApps_.isEmpty()) {
if (logAggregationReportsForAppsBuilder_.isEmpty()) {
logAggregationReportsForAppsBuilder_.dispose();
logAggregationReportsForAppsBuilder_ = null;
logAggregationReportsForApps_ = other.logAggregationReportsForApps_;
bitField0_ = (bitField0_ & ~0x00000100);
logAggregationReportsForAppsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getLogAggregationReportsForAppsFieldBuilder() : null;
} else {
logAggregationReportsForAppsBuilder_.addAllMessages(other.logAggregationReportsForApps_);
}
}
}
if (other.hasNodeAttributes()) {
mergeNodeAttributes(other.getNodeAttributes());
}
if (other.hasNodeStatus()) {
mergeNodeStatus(other.getNodeStatus());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// All fields of this message are optional/repeated, so initialization
// only depends on set sub-messages whose own types declare required
// fields; each such field is checked recursively when present.
if (hasResource()) {
if (!getResource().isInitialized()) {
return false;
}
}
for (int i = 0; i < getContainerStatusesCount(); i++) {
if (!getContainerStatuses(i).isInitialized()) {
return false;
}
}
if (hasPhysicalResource()) {
if (!getPhysicalResource().isInitialized()) {
return false;
}
}
if (hasNodeAttributes()) {
if (!getNodeAttributes().isInitialized()) {
return false;
}
}
if (hasNodeStatus()) {
if (!getNodeStatus().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
// Wire-format parse loop. Each case label is a precomputed tag value
// ((field_number << 3) | wire_type), e.g. 10 = field 1 length-delimited,
// 24 = field 3 varint. Unknown tags are preserved via
// parseUnknownField; tag 0 (or an end-group tag) terminates the loop.
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getNodeIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 24: {
httpPort_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 24
case 34: {
input.readMessage(
getResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 34
case 42: {
nmVersion_ = input.readBytes();
bitField0_ |= 0x00000008;
break;
} // case 42
case 50: {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.PARSER,
extensionRegistry);
if (containerStatusesBuilder_ == null) {
ensureContainerStatusesIsMutable();
containerStatuses_.add(m);
} else {
containerStatusesBuilder_.addMessage(m);
}
break;
} // case 50
case 58: {
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER,
extensionRegistry);
if (runningApplicationsBuilder_ == null) {
ensureRunningApplicationsIsMutable();
runningApplications_.add(m);
} else {
runningApplicationsBuilder_.addMessage(m);
}
break;
} // case 58
case 66: {
input.readMessage(
getNodeLabelsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000040;
break;
} // case 66
case 74: {
input.readMessage(
getPhysicalResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000080;
break;
} // case 74
case 82: {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.PARSER,
extensionRegistry);
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.add(m);
} else {
logAggregationReportsForAppsBuilder_.addMessage(m);
}
break;
} // case 82
case 90: {
input.readMessage(
getNodeAttributesFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000200;
break;
} // case 90
case 98: {
input.readMessage(
getNodeStatusFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000400;
break;
} // case 98
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on failure: fields parsed before the
// exception have already mutated this builder.
onChanged();
} // finally
return this;
}
// Presence/mutability bits for all fields of this builder (bit layout
// documented in buildPartial0 above -- TODO not visible here, see the
// 0x...-masks used throughout this class).
private int bitField0_;
// Singular message field node_id = 1: either the plain cached message
// (nodeId_) or, once a nested builder is requested, nodeIdBuilder_.
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return Whether the nodeId field is set.
*/
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return The nodeId, or the type's default instance when unset.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
if (nodeIdBuilder_ == null) {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
} else {
return nodeIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeId_ = value;
} else {
nodeIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder setNodeId(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (nodeIdBuilder_ == null) {
nodeId_ = builderForValue.build();
} else {
nodeIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*
* Proto2 merge semantics: if a non-default value is already present,
* {@code value} is merged into it field-by-field; otherwise it replaces
* the current value outright.
*/
public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
nodeId_ != null &&
nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
getNodeIdBuilder().mergeFrom(value);
} else {
nodeId_ = value;
}
} else {
nodeIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder clearNodeId() {
bitField0_ = (bitField0_ & ~0x00000001);
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*
* Returns a mutable nested builder; marks the field present.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getNodeIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
if (nodeIdBuilder_ != null) {
return nodeIdBuilder_.getMessageOrBuilder();
} else {
return nodeId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*
* Lazily creates the SingleFieldBuilderV3; once created, nodeId_ is
* nulled and the builder becomes the single source of truth.
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getNodeIdFieldBuilder() {
if (nodeIdBuilder_ == null) {
nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
getNodeId(),
getParentForChildren(),
isClean());
nodeId_ = null;
}
return nodeIdBuilder_;
}
// Scalar field http_port = 3 (presence bit 0x02).
private int httpPort_ ;
/**
* optional int32 http_port = 3;
* @return Whether the httpPort field is set.
*/
@java.lang.Override
public boolean hasHttpPort() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int32 http_port = 3;
* @return The httpPort (0 when unset).
*/
@java.lang.Override
public int getHttpPort() {
return httpPort_;
}
/**
* optional int32 http_port = 3;
* @param value The httpPort to set.
* @return This builder for chaining.
*/
public Builder setHttpPort(int value) {
httpPort_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional int32 http_port = 3;
* @return This builder for chaining.
*/
public Builder clearHttpPort() {
bitField0_ = (bitField0_ & ~0x00000002);
httpPort_ = 0;
onChanged();
return this;
}
// Singular message field resource = 4 (presence bit 0x04); same
// message-or-nested-builder pattern as node_id above.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
* @return Whether the resource field is set.
*/
public boolean hasResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
* @return The resource, or the type's default instance when unset.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
if (resourceBuilder_ == null) {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
} else {
return resourceBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*/
public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
} else {
resourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*/
public Builder setResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (resourceBuilder_ == null) {
resource_ = builderForValue.build();
} else {
resourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*
* Proto2 merge semantics: merges into an existing non-default value,
* otherwise replaces it.
*/
public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
resource_ != null &&
resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getResourceBuilder().mergeFrom(value);
} else {
resource_ = value;
}
} else {
resourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*/
public Builder clearResource() {
bitField0_ = (bitField0_ & ~0x00000004);
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getResourceFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
if (resourceBuilder_ != null) {
return resourceBuilder_.getMessageOrBuilder();
} else {
return resource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
}
/**
* optional .hadoop.yarn.ResourceProto resource = 4;
*
* Lazily creates the SingleFieldBuilderV3 and hands ownership of the
* cached value to it.
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getResourceFieldBuilder() {
if (resourceBuilder_ == null) {
resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getResource(),
getParentForChildren(),
isClean());
resource_ = null;
}
return resourceBuilder_;
}
// String field nm_version = 5 (presence bit 0x08). Stored as Object:
// holds either a decoded java.lang.String or the raw ByteString from the
// wire; conversion between the two is performed (and cached) on demand.
private java.lang.Object nmVersion_ = "";
/**
* optional string nm_version = 5;
* @return Whether the nmVersion field is set.
*/
public boolean hasNmVersion() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional string nm_version = 5;
* @return The nmVersion.
*/
public java.lang.String getNmVersion() {
java.lang.Object ref = nmVersion_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form only when the bytes are valid UTF-8, so
// getNmVersionBytes() can still return the original raw bytes.
if (bs.isValidUtf8()) {
nmVersion_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string nm_version = 5;
* @return The bytes for nmVersion.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNmVersionBytes() {
java.lang.Object ref = nmVersion_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nmVersion_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string nm_version = 5;
* @param value The nmVersion to set.
* @return This builder for chaining.
*/
public Builder setNmVersion(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nmVersion_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional string nm_version = 5;
* @return This builder for chaining.
*/
public Builder clearNmVersion() {
nmVersion_ = getDefaultInstance().getNmVersion();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
* optional string nm_version = 5;
* @param value The bytes for nmVersion to set.
* @return This builder for chaining.
*/
public Builder setNmVersionBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
nmVersion_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
// Repeated message field container_statuses = 6 (mutability bit 0x10).
// NOTE(review): the generic type parameters below were missing (raw
// java.util.List / ArrayList) — almost certainly stripped during
// HTML-to-text extraction of this generated file, since the sibling
// RepeatedFieldBuilderV3 declaration retains its full generics and fixes
// the element type as NMContainerStatusProto. Restored here to match
// genuine protoc output and avoid unchecked raw-type warnings.
private java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto> containerStatuses_ =
java.util.Collections.emptyList();
// Copy-on-write helper: replaces the (possibly shared immutable) list
// with a private ArrayList before the first in-place mutation.
private void ensureContainerStatusesIsMutable() {
if (!((bitField0_ & 0x00000010) != 0)) {
containerStatuses_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto>(containerStatuses_);
bitField0_ |= 0x00000010;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder> containerStatusesBuilder_;
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
* @return An unmodifiable view of the current list.
*/
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto> getContainerStatusesList() {
if (containerStatusesBuilder_ == null) {
return java.util.Collections.unmodifiableList(containerStatuses_);
} else {
return containerStatusesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public int getContainerStatusesCount() {
if (containerStatusesBuilder_ == null) {
return containerStatuses_.size();
} else {
return containerStatusesBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto getContainerStatuses(int index) {
if (containerStatusesBuilder_ == null) {
return containerStatuses_.get(index);
} else {
return containerStatusesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder setContainerStatuses(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto value) {
if (containerStatusesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainerStatusesIsMutable();
containerStatuses_.set(index, value);
onChanged();
} else {
containerStatusesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder setContainerStatuses(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder builderForValue) {
if (containerStatusesBuilder_ == null) {
ensureContainerStatusesIsMutable();
containerStatuses_.set(index, builderForValue.build());
onChanged();
} else {
containerStatusesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder addContainerStatuses(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto value) {
if (containerStatusesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainerStatusesIsMutable();
containerStatuses_.add(value);
onChanged();
} else {
containerStatusesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder addContainerStatuses(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto value) {
if (containerStatusesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainerStatusesIsMutable();
containerStatuses_.add(index, value);
onChanged();
} else {
containerStatusesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder addContainerStatuses(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder builderForValue) {
if (containerStatusesBuilder_ == null) {
ensureContainerStatusesIsMutable();
containerStatuses_.add(builderForValue.build());
onChanged();
} else {
containerStatusesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder addContainerStatuses(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder builderForValue) {
if (containerStatusesBuilder_ == null) {
ensureContainerStatusesIsMutable();
containerStatuses_.add(index, builderForValue.build());
onChanged();
} else {
containerStatusesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder addAllContainerStatuses(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto> values) {
if (containerStatusesBuilder_ == null) {
ensureContainerStatusesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, containerStatuses_);
onChanged();
} else {
containerStatusesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder clearContainerStatuses() {
if (containerStatusesBuilder_ == null) {
containerStatuses_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
} else {
containerStatusesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public Builder removeContainerStatuses(int index) {
if (containerStatusesBuilder_ == null) {
ensureContainerStatusesIsMutable();
containerStatuses_.remove(index);
onChanged();
} else {
containerStatusesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder getContainerStatusesBuilder(
int index) {
return getContainerStatusesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder getContainerStatusesOrBuilder(
int index) {
if (containerStatusesBuilder_ == null) {
return containerStatuses_.get(index); } else {
return containerStatusesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder>
getContainerStatusesOrBuilderList() {
if (containerStatusesBuilder_ != null) {
return containerStatusesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(containerStatuses_);
}
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder addContainerStatusesBuilder() {
return getContainerStatusesFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder addContainerStatusesBuilder(
int index) {
return getContainerStatusesFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.NMContainerStatusProto container_statuses = 6;
*/
public java.util.List
getContainerStatusesBuilderList() {
return getContainerStatusesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder>
getContainerStatusesFieldBuilder() {
if (containerStatusesBuilder_ == null) {
containerStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder>(
containerStatuses_,
((bitField0_ & 0x00000010) != 0),
getParentForChildren(),
isClean());
containerStatuses_ = null;
}
return containerStatusesBuilder_;
}
private java.util.List runningApplications_ =
java.util.Collections.emptyList();
private void ensureRunningApplicationsIsMutable() {
if (!((bitField0_ & 0x00000020) != 0)) {
runningApplications_ = new java.util.ArrayList(runningApplications_);
bitField0_ |= 0x00000020;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> runningApplicationsBuilder_;
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public java.util.List getRunningApplicationsList() {
if (runningApplicationsBuilder_ == null) {
return java.util.Collections.unmodifiableList(runningApplications_);
} else {
return runningApplicationsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public int getRunningApplicationsCount() {
if (runningApplicationsBuilder_ == null) {
return runningApplications_.size();
} else {
return runningApplicationsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getRunningApplications(int index) {
if (runningApplicationsBuilder_ == null) {
return runningApplications_.get(index);
} else {
return runningApplicationsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder setRunningApplications(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (runningApplicationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRunningApplicationsIsMutable();
runningApplications_.set(index, value);
onChanged();
} else {
runningApplicationsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder setRunningApplications(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (runningApplicationsBuilder_ == null) {
ensureRunningApplicationsIsMutable();
runningApplications_.set(index, builderForValue.build());
onChanged();
} else {
runningApplicationsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder addRunningApplications(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (runningApplicationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRunningApplicationsIsMutable();
runningApplications_.add(value);
onChanged();
} else {
runningApplicationsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder addRunningApplications(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (runningApplicationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRunningApplicationsIsMutable();
runningApplications_.add(index, value);
onChanged();
} else {
runningApplicationsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder addRunningApplications(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (runningApplicationsBuilder_ == null) {
ensureRunningApplicationsIsMutable();
runningApplications_.add(builderForValue.build());
onChanged();
} else {
runningApplicationsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder addRunningApplications(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (runningApplicationsBuilder_ == null) {
ensureRunningApplicationsIsMutable();
runningApplications_.add(index, builderForValue.build());
onChanged();
} else {
runningApplicationsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder addAllRunningApplications(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto> values) {
if (runningApplicationsBuilder_ == null) {
ensureRunningApplicationsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, runningApplications_);
onChanged();
} else {
runningApplicationsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder clearRunningApplications() {
if (runningApplicationsBuilder_ == null) {
runningApplications_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
} else {
runningApplicationsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public Builder removeRunningApplications(int index) {
if (runningApplicationsBuilder_ == null) {
ensureRunningApplicationsIsMutable();
runningApplications_.remove(index);
onChanged();
} else {
runningApplicationsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getRunningApplicationsBuilder(
int index) {
return getRunningApplicationsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getRunningApplicationsOrBuilder(
int index) {
if (runningApplicationsBuilder_ == null) {
return runningApplications_.get(index); } else {
return runningApplicationsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getRunningApplicationsOrBuilderList() {
if (runningApplicationsBuilder_ != null) {
return runningApplicationsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(runningApplications_);
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder addRunningApplicationsBuilder() {
return getRunningApplicationsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder addRunningApplicationsBuilder(
int index) {
return getRunningApplicationsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ApplicationIdProto runningApplications = 7;
*/
public java.util.List
getRunningApplicationsBuilderList() {
return getRunningApplicationsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getRunningApplicationsFieldBuilder() {
if (runningApplicationsBuilder_ == null) {
runningApplicationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
runningApplications_,
((bitField0_ & 0x00000020) != 0),
getParentForChildren(),
isClean());
runningApplications_ = null;
}
return runningApplicationsBuilder_;
}
// ---- optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8 ----
// Presence is tracked by bitField0_ bit 0x40; the value lives either in
// nodeLabels_ or, once created, in nodeLabelsBuilder_.
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto nodeLabels_;
// Lazily-created single-field builder; while non-null it owns the value.
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder> nodeLabelsBuilder_;
/**
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 * @return Whether the nodeLabels field is set.
 */
public boolean hasNodeLabels() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 * @return The nodeLabels; never null (the default instance when unset).
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getNodeLabels() {
if (nodeLabelsBuilder_ == null) {
return nodeLabels_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
} else {
return nodeLabelsBuilder_.getMessage();
}
}
/**
 * Sets the field to the given message.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 */
public Builder setNodeLabels(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto value) {
if (nodeLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeLabels_ = value;
} else {
nodeLabelsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
 * Sets the field from a builder's built message.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 */
public Builder setNodeLabels(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder builderForValue) {
if (nodeLabelsBuilder_ == null) {
nodeLabels_ = builderForValue.build();
} else {
nodeLabelsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
 * Merges the given message into the field; if the field is unset or holds the
 * default instance, the value simply replaces it.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 */
public Builder mergeNodeLabels(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto value) {
if (nodeLabelsBuilder_ == null) {
if (((bitField0_ & 0x00000040) != 0) &&
nodeLabels_ != null &&
nodeLabels_ != org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance()) {
getNodeLabelsBuilder().mergeFrom(value);
} else {
nodeLabels_ = value;
}
} else {
nodeLabelsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
 * Clears the field, its presence bit, and any attached builder.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 */
public Builder clearNodeLabels() {
bitField0_ = (bitField0_ & ~0x00000040);
nodeLabels_ = null;
if (nodeLabelsBuilder_ != null) {
nodeLabelsBuilder_.dispose();
nodeLabelsBuilder_ = null;
}
onChanged();
return this;
}
/**
 * Returns a mutable builder for the field, marking it present.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder getNodeLabelsBuilder() {
bitField0_ |= 0x00000040;
onChanged();
return getNodeLabelsFieldBuilder().getBuilder();
}
/**
 * Read-only view of the field without forcing builder creation.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder getNodeLabelsOrBuilder() {
if (nodeLabelsBuilder_ != null) {
return nodeLabelsBuilder_.getMessageOrBuilder();
} else {
return nodeLabels_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
}
}
/**
 * Lazily creates the single-field builder; once created, nodeLabels_ is
 * dropped because the builder owns the value.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 8;</code>
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder>
getNodeLabelsFieldBuilder() {
if (nodeLabelsBuilder_ == null) {
nodeLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder>(
getNodeLabels(),
getParentForChildren(),
isClean());
nodeLabels_ = null;
}
return nodeLabelsBuilder_;
}
// ---- optional .hadoop.yarn.ResourceProto physicalResource = 9 ----
// Presence is tracked by bitField0_ bit 0x80; the value lives either in
// physicalResource_ or, once created, in physicalResourceBuilder_.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto physicalResource_;
// Lazily-created single-field builder; while non-null it owns the value.
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> physicalResourceBuilder_;
/**
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 * @return Whether the physicalResource field is set.
 */
public boolean hasPhysicalResource() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 * @return The physicalResource; never null (the default instance when unset).
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getPhysicalResource() {
if (physicalResourceBuilder_ == null) {
return physicalResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : physicalResource_;
} else {
return physicalResourceBuilder_.getMessage();
}
}
/**
 * Sets the field to the given message.
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 */
public Builder setPhysicalResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (physicalResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
physicalResource_ = value;
} else {
physicalResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
 * Sets the field from a builder's built message.
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 */
public Builder setPhysicalResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (physicalResourceBuilder_ == null) {
physicalResource_ = builderForValue.build();
} else {
physicalResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
 * Merges the given message into the field; if the field is unset or holds the
 * default instance, the value simply replaces it.
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 */
public Builder mergePhysicalResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (physicalResourceBuilder_ == null) {
if (((bitField0_ & 0x00000080) != 0) &&
physicalResource_ != null &&
physicalResource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getPhysicalResourceBuilder().mergeFrom(value);
} else {
physicalResource_ = value;
}
} else {
physicalResourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
 * Clears the field, its presence bit, and any attached builder.
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 */
public Builder clearPhysicalResource() {
bitField0_ = (bitField0_ & ~0x00000080);
physicalResource_ = null;
if (physicalResourceBuilder_ != null) {
physicalResourceBuilder_.dispose();
physicalResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
 * Returns a mutable builder for the field, marking it present.
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getPhysicalResourceBuilder() {
bitField0_ |= 0x00000080;
onChanged();
return getPhysicalResourceFieldBuilder().getBuilder();
}
/**
 * Read-only view of the field without forcing builder creation.
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getPhysicalResourceOrBuilder() {
if (physicalResourceBuilder_ != null) {
return physicalResourceBuilder_.getMessageOrBuilder();
} else {
return physicalResource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : physicalResource_;
}
}
/**
 * Lazily creates the single-field builder; once created, physicalResource_ is
 * dropped because the builder owns the value.
 * <code>optional .hadoop.yarn.ResourceProto physicalResource = 9;</code>
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getPhysicalResourceFieldBuilder() {
if (physicalResourceBuilder_ == null) {
physicalResourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getPhysicalResource(),
getParentForChildren(),
isClean());
physicalResource_ = null;
}
return physicalResourceBuilder_;
}
private java.util.List logAggregationReportsForApps_ =
java.util.Collections.emptyList();
private void ensureLogAggregationReportsForAppsIsMutable() {
if (!((bitField0_ & 0x00000100) != 0)) {
logAggregationReportsForApps_ = new java.util.ArrayList(logAggregationReportsForApps_);
bitField0_ |= 0x00000100;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder> logAggregationReportsForAppsBuilder_;
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public java.util.List getLogAggregationReportsForAppsList() {
if (logAggregationReportsForAppsBuilder_ == null) {
return java.util.Collections.unmodifiableList(logAggregationReportsForApps_);
} else {
return logAggregationReportsForAppsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public int getLogAggregationReportsForAppsCount() {
if (logAggregationReportsForAppsBuilder_ == null) {
return logAggregationReportsForApps_.size();
} else {
return logAggregationReportsForAppsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getLogAggregationReportsForApps(int index) {
if (logAggregationReportsForAppsBuilder_ == null) {
return logAggregationReportsForApps_.get(index);
} else {
return logAggregationReportsForAppsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder setLogAggregationReportsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto value) {
if (logAggregationReportsForAppsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.set(index, value);
onChanged();
} else {
logAggregationReportsForAppsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder setLogAggregationReportsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder builderForValue) {
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.set(index, builderForValue.build());
onChanged();
} else {
logAggregationReportsForAppsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder addLogAggregationReportsForApps(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto value) {
if (logAggregationReportsForAppsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.add(value);
onChanged();
} else {
logAggregationReportsForAppsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder addLogAggregationReportsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto value) {
if (logAggregationReportsForAppsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.add(index, value);
onChanged();
} else {
logAggregationReportsForAppsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder addLogAggregationReportsForApps(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder builderForValue) {
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.add(builderForValue.build());
onChanged();
} else {
logAggregationReportsForAppsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder addLogAggregationReportsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder builderForValue) {
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.add(index, builderForValue.build());
onChanged();
} else {
logAggregationReportsForAppsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder addAllLogAggregationReportsForApps(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto> values) {
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, logAggregationReportsForApps_);
onChanged();
} else {
logAggregationReportsForAppsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder clearLogAggregationReportsForApps() {
if (logAggregationReportsForAppsBuilder_ == null) {
logAggregationReportsForApps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000100);
onChanged();
} else {
logAggregationReportsForAppsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public Builder removeLogAggregationReportsForApps(int index) {
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.remove(index);
onChanged();
} else {
logAggregationReportsForAppsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder getLogAggregationReportsForAppsBuilder(
int index) {
return getLogAggregationReportsForAppsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder getLogAggregationReportsForAppsOrBuilder(
    int index) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    return logAggregationReportsForApps_.get(index);
  } else {
    return logAggregationReportsForAppsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * Read-only view over the reports; delegates to the lazy field builder once one exists.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;</code>
 */
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
    getLogAggregationReportsForAppsOrBuilderList() {
  if (logAggregationReportsForAppsBuilder_ != null) {
    return logAggregationReportsForAppsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(logAggregationReportsForApps_);
  }
}
/**
 * Appends a new default-initialized report and returns its builder for in-place editing.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder addLogAggregationReportsForAppsBuilder() {
  return getLogAggregationReportsForAppsFieldBuilder().addBuilder(
      org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.getDefaultInstance());
}
/**
 * Inserts a new default-initialized report at {@code index} and returns its builder.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder addLogAggregationReportsForAppsBuilder(
    int index) {
  return getLogAggregationReportsForAppsFieldBuilder().addBuilder(
      index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 10;</code>
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder>
    getLogAggregationReportsForAppsBuilderList() {
  return getLogAggregationReportsForAppsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
    getLogAggregationReportsForAppsFieldBuilder() {
  if (logAggregationReportsForAppsBuilder_ == null) {
    logAggregationReportsForAppsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>(
            logAggregationReportsForApps_,
            ((bitField0_ & 0x00000100) != 0),
            getParentForChildren(),
            isClean());
    // Once the builder takes ownership of the list, the plain field must not be used again.
    logAggregationReportsForApps_ = null;
  }
  return logAggregationReportsForAppsBuilder_;
}
// --- Field 11: nodeAttributes (optional message) ---
// Standard GeneratedMessageV3 single-field-builder pattern: the plain field
// (nodeAttributes_) holds the value until a SingleFieldBuilderV3 is created
// lazily, after which the builder owns the value. Presence bit: 0x00000200.
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto nodeAttributes_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder> nodeAttributesBuilder_;
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
* @return Whether the nodeAttributes field is set.
*/
public boolean hasNodeAttributes() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
* @return The nodeAttributes.
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getNodeAttributes() {
if (nodeAttributesBuilder_ == null) {
// Unset field reads as the default instance, never null.
return nodeAttributes_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
} else {
return nodeAttributesBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
public Builder setNodeAttributes(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto value) {
if (nodeAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeAttributes_ = value;
} else {
nodeAttributesBuilder_.setMessage(value);
}
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
public Builder setNodeAttributes(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder builderForValue) {
if (nodeAttributesBuilder_ == null) {
nodeAttributes_ = builderForValue.build();
} else {
nodeAttributesBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
* Merges {@code value} into the field: if already set to a non-default message,
* field-level merge semantics apply; otherwise the value simply replaces it.
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
public Builder mergeNodeAttributes(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto value) {
if (nodeAttributesBuilder_ == null) {
if (((bitField0_ & 0x00000200) != 0) &&
nodeAttributes_ != null &&
nodeAttributes_ != org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance()) {
getNodeAttributesBuilder().mergeFrom(value);
} else {
nodeAttributes_ = value;
}
} else {
nodeAttributesBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
* Clears the presence bit, drops the message, and disposes any live builder.
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
public Builder clearNodeAttributes() {
bitField0_ = (bitField0_ & ~0x00000200);
nodeAttributes_ = null;
if (nodeAttributesBuilder_ != null) {
nodeAttributesBuilder_.dispose();
nodeAttributesBuilder_ = null;
}
onChanged();
return this;
}
/**
* Marks the field present and returns a mutable builder for it.
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder getNodeAttributesBuilder() {
bitField0_ |= 0x00000200;
onChanged();
return getNodeAttributesFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder getNodeAttributesOrBuilder() {
if (nodeAttributesBuilder_ != null) {
return nodeAttributesBuilder_.getMessageOrBuilder();
} else {
return nodeAttributes_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
}
}
/**
* Lazily creates the single-field builder; the plain field is nulled once the
* builder takes ownership of the current value.
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 11;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder>
getNodeAttributesFieldBuilder() {
if (nodeAttributesBuilder_ == null) {
nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder>(
getNodeAttributes(),
getParentForChildren(),
isClean());
nodeAttributes_ = null;
}
return nodeAttributesBuilder_;
}
// --- Field 12: nodeStatus (optional message) ---
// Same lazy single-field-builder pattern as nodeAttributes above.
// Presence bit: 0x00000400.
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto nodeStatus_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder> nodeStatusBuilder_;
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
* @return Whether the nodeStatus field is set.
*/
public boolean hasNodeStatus() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
* @return The nodeStatus.
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getNodeStatus() {
if (nodeStatusBuilder_ == null) {
// Unset field reads as the default instance, never null.
return nodeStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
} else {
return nodeStatusBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
public Builder setNodeStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto value) {
if (nodeStatusBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeStatus_ = value;
} else {
nodeStatusBuilder_.setMessage(value);
}
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
public Builder setNodeStatus(
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder builderForValue) {
if (nodeStatusBuilder_ == null) {
nodeStatus_ = builderForValue.build();
} else {
nodeStatusBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* Merges {@code value} into the field: if already set to a non-default message,
* field-level merge semantics apply; otherwise the value simply replaces it.
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
public Builder mergeNodeStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto value) {
if (nodeStatusBuilder_ == null) {
if (((bitField0_ & 0x00000400) != 0) &&
nodeStatus_ != null &&
nodeStatus_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance()) {
getNodeStatusBuilder().mergeFrom(value);
} else {
nodeStatus_ = value;
}
} else {
nodeStatusBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* Clears the presence bit, drops the message, and disposes any live builder.
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
public Builder clearNodeStatus() {
bitField0_ = (bitField0_ & ~0x00000400);
nodeStatus_ = null;
if (nodeStatusBuilder_ != null) {
nodeStatusBuilder_.dispose();
nodeStatusBuilder_ = null;
}
onChanged();
return this;
}
/**
* Marks the field present and returns a mutable builder for it.
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder getNodeStatusBuilder() {
bitField0_ |= 0x00000400;
onChanged();
return getNodeStatusFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder getNodeStatusOrBuilder() {
if (nodeStatusBuilder_ != null) {
return nodeStatusBuilder_.getMessageOrBuilder();
} else {
return nodeStatus_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
}
}
/**
* Lazily creates the single-field builder; the plain field is nulled once the
* builder takes ownership of the current value.
* optional .hadoop.yarn.NodeStatusProto nodeStatus = 12;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder>
getNodeStatusFieldBuilder() {
if (nodeStatusBuilder_ == null) {
nodeStatusBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder>(
getNodeStatus(),
getParentForChildren(),
isClean());
nodeStatus_ = null;
}
return nodeStatusBuilder_;
}
// Final overrides that simply delegate unknown-field handling to the
// GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RegisterNodeManagerRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RegisterNodeManagerRequestProto)
// Shared immutable default instance for RegisterNodeManagerRequestProto,
// created eagerly in the static initializer.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Parser for this message type. Deprecated in favor of {@link #parser()},
 * which is the supported accessor in generated protobuf code.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RegisterNodeManagerRequestProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RegisterNodeManagerRequestProto>() {
  @java.lang.Override
  public RegisterNodeManagerRequestProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach the partially-built message so callers can inspect what parsed.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RegisterNodeManagerRequestProto> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RegisterNodeManagerRequestProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Accessor interface implemented by both {@code RegisterNodeManagerResponseProto}
 * and its {@code Builder}; exposes read-only views of every declared field.
 */
public interface RegisterNodeManagerResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.RegisterNodeManagerResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
* @return Whether the containerTokenMasterKey field is set.
*/
boolean hasContainerTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
* @return The containerTokenMasterKey.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getContainerTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getContainerTokenMasterKeyOrBuilder();
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
* @return Whether the nmTokenMasterKey field is set.
*/
boolean hasNmTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
* @return The nmTokenMasterKey.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getNmTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getNmTokenMasterKeyOrBuilder();
/**
* optional .hadoop.yarn.NodeActionProto nodeAction = 3;
* @return Whether the nodeAction field is set.
*/
boolean hasNodeAction();
/**
* optional .hadoop.yarn.NodeActionProto nodeAction = 3;
* @return The nodeAction.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto getNodeAction();
/**
* optional int64 rm_identifier = 4;
* @return Whether the rmIdentifier field is set.
*/
boolean hasRmIdentifier();
/**
* optional int64 rm_identifier = 4;
* @return The rmIdentifier.
*/
long getRmIdentifier();
/**
* optional string diagnostics_message = 5;
* @return Whether the diagnosticsMessage field is set.
*/
boolean hasDiagnosticsMessage();
/**
* optional string diagnostics_message = 5;
* @return The diagnosticsMessage.
*/
java.lang.String getDiagnosticsMessage();
/**
* optional string diagnostics_message = 5;
* @return The bytes for diagnosticsMessage.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsMessageBytes();
/**
* optional string rm_version = 6;
* @return Whether the rmVersion field is set.
*/
boolean hasRmVersion();
/**
* optional string rm_version = 6;
* @return The rmVersion.
*/
java.lang.String getRmVersion();
/**
* optional string rm_version = 6;
* @return The bytes for rmVersion.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getRmVersionBytes();
/**
* optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
* @return Whether the areNodeLabelsAcceptedByRM field is set.
*/
boolean hasAreNodeLabelsAcceptedByRM();
/**
* optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
* @return The areNodeLabelsAcceptedByRM.
*/
boolean getAreNodeLabelsAcceptedByRM();
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
* @return Whether the resource field is set.
*/
boolean hasResource();
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
* @return The resource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();
/**
* optional bool areNodeAttributesAcceptedByRM = 9 [default = false];
* @return Whether the areNodeAttributesAcceptedByRM field is set.
*/
boolean hasAreNodeAttributesAcceptedByRM();
/**
* optional bool areNodeAttributesAcceptedByRM = 9 [default = false];
* @return The areNodeAttributesAcceptedByRM.
*/
boolean getAreNodeAttributesAcceptedByRM();
}
/**
* Protobuf type {@code hadoop.yarn.RegisterNodeManagerResponseProto}
*/
public static final class RegisterNodeManagerResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.RegisterNodeManagerResponseProto)
RegisterNodeManagerResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RegisterNodeManagerResponseProto.newBuilder() to construct.
private RegisterNodeManagerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; initializes fields whose
// Java default differs from a null reference (enum ordinal and strings).
private RegisterNodeManagerResponseProto() {
nodeAction_ = 0;
diagnosticsMessage_ = "";
rmVersion_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RegisterNodeManagerResponseProto();
}
// Descriptor / reflection plumbing wired to the statics generated at the
// bottom of this outer class.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.Builder.class);
}
// Field-presence bitmask: one bit per optional field, assigned in field order
// (0x1 = field 1, 0x2 = field 2, ...).
private int bitField0_;
public static final int CONTAINER_TOKEN_MASTER_KEY_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto containerTokenMasterKey_;
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
* @return Whether the containerTokenMasterKey field is set.
*/
@java.lang.Override
public boolean hasContainerTokenMasterKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
* @return The containerTokenMasterKey.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getContainerTokenMasterKey() {
return containerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
}
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getContainerTokenMasterKeyOrBuilder() {
return containerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
}
public static final int NM_TOKEN_MASTER_KEY_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto nmTokenMasterKey_;
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
* @return Whether the nmTokenMasterKey field is set.
*/
@java.lang.Override
public boolean hasNmTokenMasterKey() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
* @return The nmTokenMasterKey.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getNmTokenMasterKey() {
return nmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
}
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getNmTokenMasterKeyOrBuilder() {
return nmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
}
public static final int NODEACTION_FIELD_NUMBER = 3;
// Enum stored by wire number; decoded on access in getNodeAction().
private int nodeAction_ = 0;
/**
* optional .hadoop.yarn.NodeActionProto nodeAction = 3;
* @return Whether the nodeAction field is set.
*/
@java.lang.Override public boolean hasNodeAction() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hadoop.yarn.NodeActionProto nodeAction = 3;
* @return The nodeAction.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto getNodeAction() {
// Unrecognized wire values fall back to NORMAL.
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto result = org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.forNumber(nodeAction_);
return result == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.NORMAL : result;
}
public static final int RM_IDENTIFIER_FIELD_NUMBER = 4;
private long rmIdentifier_ = 0L;
/**
* optional int64 rm_identifier = 4;
* @return Whether the rmIdentifier field is set.
*/
@java.lang.Override
public boolean hasRmIdentifier() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional int64 rm_identifier = 4;
* @return The rmIdentifier.
*/
@java.lang.Override
public long getRmIdentifier() {
return rmIdentifier_;
}
public static final int DIAGNOSTICS_MESSAGE_FIELD_NUMBER = 5;
// Holds either a String or a ByteString; decoded lazily and cached (volatile
// for safe publication across threads).
@SuppressWarnings("serial")
private volatile java.lang.Object diagnosticsMessage_ = "";
/**
* optional string diagnostics_message = 5;
* @return Whether the diagnosticsMessage field is set.
*/
@java.lang.Override
public boolean hasDiagnosticsMessage() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional string diagnostics_message = 5;
* @return The diagnosticsMessage.
*/
@java.lang.Override
public java.lang.String getDiagnosticsMessage() {
java.lang.Object ref = diagnosticsMessage_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
diagnosticsMessage_ = s;
}
return s;
}
}
/**
* optional string diagnostics_message = 5;
* @return The bytes for diagnosticsMessage.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsMessageBytes() {
java.lang.Object ref = diagnosticsMessage_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
diagnosticsMessage_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int RM_VERSION_FIELD_NUMBER = 6;
// Same lazy String/ByteString caching scheme as diagnosticsMessage_ above.
@SuppressWarnings("serial")
private volatile java.lang.Object rmVersion_ = "";
/**
* optional string rm_version = 6;
* @return Whether the rmVersion field is set.
*/
@java.lang.Override
public boolean hasRmVersion() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional string rm_version = 6;
* @return The rmVersion.
*/
@java.lang.Override
public java.lang.String getRmVersion() {
java.lang.Object ref = rmVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rmVersion_ = s;
}
return s;
}
}
/**
* optional string rm_version = 6;
* @return The bytes for rmVersion.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRmVersionBytes() {
java.lang.Object ref = rmVersion_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rmVersion_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int ARENODELABELSACCEPTEDBYRM_FIELD_NUMBER = 7;
private boolean areNodeLabelsAcceptedByRM_ = false;
/**
* optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
* @return Whether the areNodeLabelsAcceptedByRM field is set.
*/
@java.lang.Override
public boolean hasAreNodeLabelsAcceptedByRM() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
* @return The areNodeLabelsAcceptedByRM.
*/
@java.lang.Override
public boolean getAreNodeLabelsAcceptedByRM() {
return areNodeLabelsAcceptedByRM_;
}
public static final int RESOURCE_FIELD_NUMBER = 8;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
* @return Whether the resource field is set.
*/
@java.lang.Override
public boolean hasResource() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
* @return The resource.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
public static final int ARENODEATTRIBUTESACCEPTEDBYRM_FIELD_NUMBER = 9;
private boolean areNodeAttributesAcceptedByRM_ = false;
/**
* optional bool areNodeAttributesAcceptedByRM = 9 [default = false];
* @return Whether the areNodeAttributesAcceptedByRM field is set.
*/
@java.lang.Override
public boolean hasAreNodeAttributesAcceptedByRM() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* optional bool areNodeAttributesAcceptedByRM = 9 [default = false];
* @return The areNodeAttributesAcceptedByRM.
*/
@java.lang.Override
public boolean getAreNodeAttributesAcceptedByRM() {
return areNodeAttributesAcceptedByRM_;
}
// Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = ok.
// Only the resource submessage is recursively validated here.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasResource()) {
if (!getResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes every present field in tag order; presence is read from
// bitField0_ rather than null checks.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getContainerTokenMasterKey());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getNmTokenMasterKey());
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeEnum(3, nodeAction_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeInt64(4, rmIdentifier_);
}
if (((bitField0_ & 0x00000010) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, diagnosticsMessage_);
}
if (((bitField0_ & 0x00000020) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 6, rmVersion_);
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeBool(7, areNodeLabelsAcceptedByRM_);
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeMessage(8, getResource());
}
if (((bitField0_ & 0x00000100) != 0)) {
output.writeBool(9, areNodeAttributesAcceptedByRM_);
}
getUnknownFields().writeTo(output);
}
// Computes the wire size once and memoizes it (-1 means not yet computed);
// the per-field logic mirrors writeTo above.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getContainerTokenMasterKey());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getNmTokenMasterKey());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(3, nodeAction_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(4, rmIdentifier_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, diagnosticsMessage_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(6, rmVersion_);
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(7, areNodeLabelsAcceptedByRM_);
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(8, getResource());
}
if (((bitField0_ & 0x00000100) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(9, areNodeAttributesAcceptedByRM_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: two messages are equal only when each field has matching
// presence AND matching value, and unknown fields also match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto) obj;
if (hasContainerTokenMasterKey() != other.hasContainerTokenMasterKey()) return false;
if (hasContainerTokenMasterKey()) {
if (!getContainerTokenMasterKey()
.equals(other.getContainerTokenMasterKey())) return false;
}
if (hasNmTokenMasterKey() != other.hasNmTokenMasterKey()) return false;
if (hasNmTokenMasterKey()) {
if (!getNmTokenMasterKey()
.equals(other.getNmTokenMasterKey())) return false;
}
if (hasNodeAction() != other.hasNodeAction()) return false;
if (hasNodeAction()) {
if (nodeAction_ != other.nodeAction_) return false;
}
if (hasRmIdentifier() != other.hasRmIdentifier()) return false;
if (hasRmIdentifier()) {
if (getRmIdentifier()
!= other.getRmIdentifier()) return false;
}
if (hasDiagnosticsMessage() != other.hasDiagnosticsMessage()) return false;
if (hasDiagnosticsMessage()) {
if (!getDiagnosticsMessage()
.equals(other.getDiagnosticsMessage())) return false;
}
if (hasRmVersion() != other.hasRmVersion()) return false;
if (hasRmVersion()) {
if (!getRmVersion()
.equals(other.getRmVersion())) return false;
}
if (hasAreNodeLabelsAcceptedByRM() != other.hasAreNodeLabelsAcceptedByRM()) return false;
if (hasAreNodeLabelsAcceptedByRM()) {
if (getAreNodeLabelsAcceptedByRM()
!= other.getAreNodeLabelsAcceptedByRM()) return false;
}
if (hasResource() != other.hasResource()) return false;
if (hasResource()) {
if (!getResource()
.equals(other.getResource())) return false;
}
if (hasAreNodeAttributesAcceptedByRM() != other.hasAreNodeAttributesAcceptedByRM()) return false;
if (hasAreNodeAttributesAcceptedByRM()) {
if (getAreNodeAttributesAcceptedByRM()
!= other.getAreNodeAttributesAcceptedByRM()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash mixes the descriptor, each present field (tagged with its field
// number), and the unknown fields; result is memoized (0 = not yet computed).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasContainerTokenMasterKey()) {
hash = (37 * hash) + CONTAINER_TOKEN_MASTER_KEY_FIELD_NUMBER;
hash = (53 * hash) + getContainerTokenMasterKey().hashCode();
}
if (hasNmTokenMasterKey()) {
hash = (37 * hash) + NM_TOKEN_MASTER_KEY_FIELD_NUMBER;
hash = (53 * hash) + getNmTokenMasterKey().hashCode();
}
if (hasNodeAction()) {
hash = (37 * hash) + NODEACTION_FIELD_NUMBER;
hash = (53 * hash) + nodeAction_;
}
if (hasRmIdentifier()) {
hash = (37 * hash) + RM_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getRmIdentifier());
}
if (hasDiagnosticsMessage()) {
hash = (37 * hash) + DIAGNOSTICS_MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getDiagnosticsMessage().hashCode();
}
if (hasRmVersion()) {
hash = (37 * hash) + RM_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getRmVersion().hashCode();
}
if (hasAreNodeLabelsAcceptedByRM()) {
hash = (37 * hash) + ARENODELABELSACCEPTEDBYRM_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getAreNodeLabelsAcceptedByRM());
}
if (hasResource()) {
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
}
if (hasAreNodeAttributesAcceptedByRM()) {
hash = (37 * hash) + ARENODEATTRIBUTESACCEPTEDBYRM_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getAreNodeAttributesAcceptedByRM());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. All overloads delegate to the
// message's PARSER and differ only in the input container (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream) and in whether an
// ExtensionRegistry is supplied. Stream variants route through
// GeneratedMessageV3 helpers that translate protobuf parse failures into
// the appropriate IOException/InvalidProtocolBufferException.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix, allowing several
// messages to share one stream.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods. newBuilder() and toBuilder() both start from
// DEFAULT_INSTANCE; toBuilder() copies this message's fields into the new
// Builder unless this IS the default instance (then an empty Builder
// suffices).
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Framework hook: creates a Builder wired to a parent so nested-builder
// changes propagate invalidation upward.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.RegisterNodeManagerResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.RegisterNodeManagerResponseProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProtoOrBuilder {
// Descriptor plumbing: exposes the static proto descriptor for this
// message type and the reflection accessor table linking descriptor
// fields to the generated message/builder classes.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-message field builders when the runtime is
// configured to always use field builders (alwaysUseFieldBuilders);
// otherwise they are created lazily on first access.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getContainerTokenMasterKeyFieldBuilder();
getNmTokenMasterKeyFieldBuilder();
getResourceFieldBuilder();
}
}
// Resets every field to its default and clears all has-bits. Nested
// message builders are disposed (not just nulled) so they detach from
// this Builder's invalidation chain.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
containerTokenMasterKey_ = null;
if (containerTokenMasterKeyBuilder_ != null) {
containerTokenMasterKeyBuilder_.dispose();
containerTokenMasterKeyBuilder_ = null;
}
nmTokenMasterKey_ = null;
if (nmTokenMasterKeyBuilder_ != null) {
nmTokenMasterKeyBuilder_.dispose();
nmTokenMasterKeyBuilder_ = null;
}
nodeAction_ = 0;
rmIdentifier_ = 0L;
diagnosticsMessage_ = "";
rmVersion_ = "";
areNodeLabelsAcceptedByRM_ = false;
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
areNodeAttributesAcceptedByRM_ = false;
return this;
}
// Reflection support: the descriptor and default instance for the message
// type this Builder produces.
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_RegisterNodeManagerResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.getDefaultInstance();
}
// build() enforces required-field initialization (throws
// UninitializedMessageException wrapper if incomplete); buildPartial()
// skips that check and copies only the fields whose has-bits are set.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies each set field from the Builder into the immutable result,
// translating the Builder's has-bits into the message's bitField0_.
// Message-typed fields prefer the nested builder's built value when one
// exists; otherwise the raw stored message is used.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.containerTokenMasterKey_ = containerTokenMasterKeyBuilder_ == null
? containerTokenMasterKey_
: containerTokenMasterKeyBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nmTokenMasterKey_ = nmTokenMasterKeyBuilder_ == null
? nmTokenMasterKey_
: nmTokenMasterKeyBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.nodeAction_ = nodeAction_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.rmIdentifier_ = rmIdentifier_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.diagnosticsMessage_ = diagnosticsMessage_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.rmVersion_ = rmVersion_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.areNodeLabelsAcceptedByRM_ = areNodeLabelsAcceptedByRM_;
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.resource_ = resourceBuilder_ == null
? resource_
: resourceBuilder_.build();
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00000100) != 0)) {
result.areNodeAttributesAcceptedByRM_ = areNodeAttributesAcceptedByRM_;
to_bitField0_ |= 0x00000100;
}
// OR (not assign) so bits set by the message constructor are preserved.
result.bitField0_ |= to_bitField0_;
}
// Reflective-mutation overrides. These exist so generated builders keep
// the exact GeneratedMessageV3.Builder semantics; all simply delegate to
// super.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific mergeFrom when the other message is the
// same generated type; otherwise falls back to reflective merging.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: only fields present in `other` overwrite (scalars)
// or recursively merge (nested messages) into this Builder. String fields
// copy the raw Object (String or ByteString) directly and set the has-bit
// inline rather than going through the setter.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.getDefaultInstance()) return this;
if (other.hasContainerTokenMasterKey()) {
mergeContainerTokenMasterKey(other.getContainerTokenMasterKey());
}
if (other.hasNmTokenMasterKey()) {
mergeNmTokenMasterKey(other.getNmTokenMasterKey());
}
if (other.hasNodeAction()) {
setNodeAction(other.getNodeAction());
}
if (other.hasRmIdentifier()) {
setRmIdentifier(other.getRmIdentifier());
}
if (other.hasDiagnosticsMessage()) {
diagnosticsMessage_ = other.diagnosticsMessage_;
bitField0_ |= 0x00000010;
onChanged();
}
if (other.hasRmVersion()) {
rmVersion_ = other.rmVersion_;
bitField0_ |= 0x00000020;
onChanged();
}
if (other.hasAreNodeLabelsAcceptedByRM()) {
setAreNodeLabelsAcceptedByRM(other.getAreNodeLabelsAcceptedByRM());
}
if (other.hasResource()) {
mergeResource(other.getResource());
}
if (other.hasAreNodeAttributesAcceptedByRM()) {
setAreNodeAttributesAcceptedByRM(other.getAreNodeAttributesAcceptedByRM());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// The only initialization constraint for this message: if `resource` is
// present, its own required fields must be initialized.
@java.lang.Override
public final boolean isInitialized() {
if (hasResource()) {
if (!getResource().isInitialized()) {
return false;
}
}
return true;
}
// Wire-format parse loop. Each case label is a precomputed tag
// (field_number << 3 | wire_type): e.g. 10 = field 1 length-delimited,
// 24 = field 3 varint. Unknown fields go to parseUnknownField; an
// unrecognized enum number is preserved as an unknown varint field.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getContainerTokenMasterKeyFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getNmTokenMasterKeyFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto tmpValue =
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(3, tmpRaw);
} else {
nodeAction_ = tmpRaw;
bitField0_ |= 0x00000004;
}
break;
} // case 24
case 32: {
rmIdentifier_ = input.readInt64();
bitField0_ |= 0x00000008;
break;
} // case 32
case 42: {
diagnosticsMessage_ = input.readBytes();
bitField0_ |= 0x00000010;
break;
} // case 42
case 50: {
rmVersion_ = input.readBytes();
bitField0_ |= 0x00000020;
break;
} // case 50
case 56: {
areNodeLabelsAcceptedByRM_ = input.readBool();
bitField0_ |= 0x00000040;
break;
} // case 56
case 66: {
input.readMessage(
getResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000080;
break;
} // case 66
case 72: {
areNodeAttributesAcceptedByRM_ = input.readBool();
bitField0_ |= 0x00000100;
break;
} // case 72
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parents of mutation even when parsing throws part-way.
onChanged();
} // finally
return this;
}
// Tracks which optional fields have been explicitly set (one bit per field).
private int bitField0_;
// Field 1 (container_token_master_key): either the plain message is stored
// in containerTokenMasterKey_, or — once a nested builder has been
// requested — containerTokenMasterKeyBuilder_ owns the value and the plain
// field is nulled. Exactly one of the two representations is live.
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto containerTokenMasterKey_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder> containerTokenMasterKeyBuilder_;
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 * @return Whether the containerTokenMasterKey field is set.
 */
public boolean hasContainerTokenMasterKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 * @return The containerTokenMasterKey.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getContainerTokenMasterKey() {
if (containerTokenMasterKeyBuilder_ == null) {
// Unset fields read as the type's default instance, never null.
return containerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
} else {
return containerTokenMasterKeyBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 */
public Builder setContainerTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (containerTokenMasterKeyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
containerTokenMasterKey_ = value;
} else {
containerTokenMasterKeyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 */
public Builder setContainerTokenMasterKey(
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder builderForValue) {
if (containerTokenMasterKeyBuilder_ == null) {
containerTokenMasterKey_ = builderForValue.build();
} else {
containerTokenMasterKeyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 */
public Builder mergeContainerTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (containerTokenMasterKeyBuilder_ == null) {
// Merge only when a non-default value is already present; otherwise
// this is effectively a set.
if (((bitField0_ & 0x00000001) != 0) &&
containerTokenMasterKey_ != null &&
containerTokenMasterKey_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance()) {
getContainerTokenMasterKeyBuilder().mergeFrom(value);
} else {
containerTokenMasterKey_ = value;
}
} else {
containerTokenMasterKeyBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 */
public Builder clearContainerTokenMasterKey() {
bitField0_ = (bitField0_ & ~0x00000001);
containerTokenMasterKey_ = null;
if (containerTokenMasterKeyBuilder_ != null) {
containerTokenMasterKeyBuilder_.dispose();
containerTokenMasterKeyBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder getContainerTokenMasterKeyBuilder() {
// Exposing the builder counts as setting the field.
bitField0_ |= 0x00000001;
onChanged();
return getContainerTokenMasterKeyFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getContainerTokenMasterKeyOrBuilder() {
if (containerTokenMasterKeyBuilder_ != null) {
return containerTokenMasterKeyBuilder_.getMessageOrBuilder();
} else {
return containerTokenMasterKey_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
}
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 1;
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>
getContainerTokenMasterKeyFieldBuilder() {
if (containerTokenMasterKeyBuilder_ == null) {
// Lazily create the builder seeded from the current value, then hand
// ownership of the value over to it.
containerTokenMasterKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>(
getContainerTokenMasterKey(),
getParentForChildren(),
isClean());
containerTokenMasterKey_ = null;
}
return containerTokenMasterKeyBuilder_;
}
// Field 2 (nm_token_master_key): same plain-value/nested-builder dual
// representation as container_token_master_key above.
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto nmTokenMasterKey_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder> nmTokenMasterKeyBuilder_;
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 * @return Whether the nmTokenMasterKey field is set.
 */
public boolean hasNmTokenMasterKey() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 * @return The nmTokenMasterKey.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getNmTokenMasterKey() {
if (nmTokenMasterKeyBuilder_ == null) {
return nmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
} else {
return nmTokenMasterKeyBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 */
public Builder setNmTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (nmTokenMasterKeyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nmTokenMasterKey_ = value;
} else {
nmTokenMasterKeyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 */
public Builder setNmTokenMasterKey(
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder builderForValue) {
if (nmTokenMasterKeyBuilder_ == null) {
nmTokenMasterKey_ = builderForValue.build();
} else {
nmTokenMasterKeyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 */
public Builder mergeNmTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (nmTokenMasterKeyBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
nmTokenMasterKey_ != null &&
nmTokenMasterKey_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance()) {
getNmTokenMasterKeyBuilder().mergeFrom(value);
} else {
nmTokenMasterKey_ = value;
}
} else {
nmTokenMasterKeyBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 */
public Builder clearNmTokenMasterKey() {
bitField0_ = (bitField0_ & ~0x00000002);
nmTokenMasterKey_ = null;
if (nmTokenMasterKeyBuilder_ != null) {
nmTokenMasterKeyBuilder_.dispose();
nmTokenMasterKeyBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder getNmTokenMasterKeyBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getNmTokenMasterKeyFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getNmTokenMasterKeyOrBuilder() {
if (nmTokenMasterKeyBuilder_ != null) {
return nmTokenMasterKeyBuilder_.getMessageOrBuilder();
} else {
return nmTokenMasterKey_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
}
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 2;
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>
getNmTokenMasterKeyFieldBuilder() {
if (nmTokenMasterKeyBuilder_ == null) {
nmTokenMasterKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>(
getNmTokenMasterKey(),
getParentForChildren(),
isClean());
nmTokenMasterKey_ = null;
}
return nmTokenMasterKeyBuilder_;
}
// Field 3 (nodeAction): enum stored as its raw wire number.
private int nodeAction_ = 0;
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 3;
 * @return Whether the nodeAction field is set.
 */
@java.lang.Override public boolean hasNodeAction() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 3;
 * @return The nodeAction.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto getNodeAction() {
// An unrecognized stored number maps to the declared default (NORMAL).
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto result = org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.forNumber(nodeAction_);
return result == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.NORMAL : result;
}
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 3;
 * @param value The nodeAction to set.
 * @return This builder for chaining.
 */
public Builder setNodeAction(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
nodeAction_ = value.getNumber();
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 3;
 * @return This builder for chaining.
 */
public Builder clearNodeAction() {
bitField0_ = (bitField0_ & ~0x00000004);
nodeAction_ = 0;
onChanged();
return this;
}
// Field 4 (rm_identifier): int64 scalar.
private long rmIdentifier_ ;
/**
 * optional int64 rm_identifier = 4;
 * @return Whether the rmIdentifier field is set.
 */
@java.lang.Override
public boolean hasRmIdentifier() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * optional int64 rm_identifier = 4;
 * @return The rmIdentifier.
 */
@java.lang.Override
public long getRmIdentifier() {
return rmIdentifier_;
}
/**
 * optional int64 rm_identifier = 4;
 * @param value The rmIdentifier to set.
 * @return This builder for chaining.
 */
public Builder setRmIdentifier(long value) {
rmIdentifier_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 * optional int64 rm_identifier = 4;
 * @return This builder for chaining.
 */
public Builder clearRmIdentifier() {
bitField0_ = (bitField0_ & ~0x00000008);
rmIdentifier_ = 0L;
onChanged();
return this;
}
// Field 5 (diagnostics_message): stored as Object holding either a String
// or a ByteString; getters lazily convert and cache in the preferred form.
private java.lang.Object diagnosticsMessage_ = "";
/**
 * optional string diagnostics_message = 5;
 * @return Whether the diagnosticsMessage field is set.
 */
public boolean hasDiagnosticsMessage() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
 * optional string diagnostics_message = 5;
 * @return The diagnosticsMessage.
 */
public java.lang.String getDiagnosticsMessage() {
java.lang.Object ref = diagnosticsMessage_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only if the bytes were valid UTF-8, so
// invalid bytes are not silently lost from the stored field.
if (bs.isValidUtf8()) {
diagnosticsMessage_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * optional string diagnostics_message = 5;
 * @return The bytes for diagnosticsMessage.
 */
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsMessageBytes() {
java.lang.Object ref = diagnosticsMessage_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
diagnosticsMessage_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * optional string diagnostics_message = 5;
 * @param value The diagnosticsMessage to set.
 * @return This builder for chaining.
 */
public Builder setDiagnosticsMessage(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
diagnosticsMessage_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
 * optional string diagnostics_message = 5;
 * @return This builder for chaining.
 */
public Builder clearDiagnosticsMessage() {
diagnosticsMessage_ = getDefaultInstance().getDiagnosticsMessage();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
 * optional string diagnostics_message = 5;
 * @param value The bytes for diagnosticsMessage to set.
 * @return This builder for chaining.
 */
public Builder setDiagnosticsMessageBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
diagnosticsMessage_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
// Field 6 (rm_version): same lazy String/ByteString dual storage as
// diagnostics_message above.
private java.lang.Object rmVersion_ = "";
/**
 * optional string rm_version = 6;
 * @return Whether the rmVersion field is set.
 */
public boolean hasRmVersion() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
 * optional string rm_version = 6;
 * @return The rmVersion.
 */
public java.lang.String getRmVersion() {
java.lang.Object ref = rmVersion_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
rmVersion_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * optional string rm_version = 6;
 * @return The bytes for rmVersion.
 */
public org.apache.hadoop.thirdparty.protobuf.ByteString
getRmVersionBytes() {
java.lang.Object ref = rmVersion_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
rmVersion_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * optional string rm_version = 6;
 * @param value The rmVersion to set.
 * @return This builder for chaining.
 */
public Builder setRmVersion(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
rmVersion_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
 * optional string rm_version = 6;
 * @return This builder for chaining.
 */
public Builder clearRmVersion() {
rmVersion_ = getDefaultInstance().getRmVersion();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
return this;
}
/**
 * optional string rm_version = 6;
 * @param value The bytes for rmVersion to set.
 * @return This builder for chaining.
 */
public Builder setRmVersionBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
rmVersion_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
// Field 7 (areNodeLabelsAcceptedByRM): bool scalar, default false.
private boolean areNodeLabelsAcceptedByRM_ ;
/**
 * optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
 * @return Whether the areNodeLabelsAcceptedByRM field is set.
 */
@java.lang.Override
public boolean hasAreNodeLabelsAcceptedByRM() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
 * optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
 * @return The areNodeLabelsAcceptedByRM.
 */
@java.lang.Override
public boolean getAreNodeLabelsAcceptedByRM() {
return areNodeLabelsAcceptedByRM_;
}
/**
 * optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
 * @param value The areNodeLabelsAcceptedByRM to set.
 * @return This builder for chaining.
 */
public Builder setAreNodeLabelsAcceptedByRM(boolean value) {
areNodeLabelsAcceptedByRM_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
 * optional bool areNodeLabelsAcceptedByRM = 7 [default = false];
 * @return This builder for chaining.
 */
public Builder clearAreNodeLabelsAcceptedByRM() {
bitField0_ = (bitField0_ & ~0x00000040);
areNodeLabelsAcceptedByRM_ = false;
onChanged();
return this;
}
// Field 8 (resource): nested ResourceProto with the usual plain-value /
// nested-builder dual representation.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
/**
 * optional .hadoop.yarn.ResourceProto resource = 8;
 * @return Whether the resource field is set.
 */
public boolean hasResource() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 8;
 * @return The resource.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
if (resourceBuilder_ == null) {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
} else {
return resourceBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 8;
 */
public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
} else {
resourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 8;
 */
public Builder setResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (resourceBuilder_ == null) {
resource_ = builderForValue.build();
} else {
resourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 8;
 */
public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
// Merge into an existing non-default value; otherwise plain set.
if (((bitField0_ & 0x00000080) != 0) &&
resource_ != null &&
resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getResourceBuilder().mergeFrom(value);
} else {
resource_ = value;
}
} else {
resourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 8;
 */
public Builder clearResource() {
bitField0_ = (bitField0_ & ~0x00000080);
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 8;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
// Exposing the builder counts as setting the field.
bitField0_ |= 0x00000080;
onChanged();
return getResourceFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
if (resourceBuilder_ != null) {
return resourceBuilder_.getMessageOrBuilder();
} else {
return resource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
}
/**
* optional .hadoop.yarn.ResourceProto resource = 8;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getResourceFieldBuilder() {
if (resourceBuilder_ == null) {
resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getResource(),
getParentForChildren(),
isClean());
resource_ = null;
}
return resourceBuilder_;
}
// Backing field for proto field 9; presence is tracked via bit 0x100 of bitField0_.
private boolean areNodeAttributesAcceptedByRM_ ;
/**
 * <code>optional bool areNodeAttributesAcceptedByRM = 9 [default = false];</code>
 * @return Whether the areNodeAttributesAcceptedByRM field is set.
 */
@java.lang.Override
public boolean hasAreNodeAttributesAcceptedByRM() {
  return ((bitField0_ & 0x00000100) != 0);
}
/**
 * <code>optional bool areNodeAttributesAcceptedByRM = 9 [default = false];</code>
 * @return The areNodeAttributesAcceptedByRM.
 */
@java.lang.Override
public boolean getAreNodeAttributesAcceptedByRM() {
  return areNodeAttributesAcceptedByRM_;
}
/**
 * <code>optional bool areNodeAttributesAcceptedByRM = 9 [default = false];</code>
 * @param value The areNodeAttributesAcceptedByRM to set.
 * @return This builder for chaining.
 */
public Builder setAreNodeAttributesAcceptedByRM(boolean value) {
  areNodeAttributesAcceptedByRM_ = value;
  bitField0_ |= 0x00000100;
  onChanged();
  return this;
}
/**
 * <code>optional bool areNodeAttributesAcceptedByRM = 9 [default = false];</code>
 * @return This builder for chaining.
 */
public Builder clearAreNodeAttributesAcceptedByRM() {
  bitField0_ = (bitField0_ & ~0x00000100);  // clear presence bit
  areNodeAttributesAcceptedByRM_ = false;   // restore proto default
  onChanged();
  return this;
}
// Unknown-field handling is delegated unchanged to GeneratedMessageV3.Builder.
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.RegisterNodeManagerResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.RegisterNodeManagerResponseProto)
// Singleton default instance shared by all callers; created eagerly at class load.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto();
}
/** @return the immutable default (all-fields-unset) instance of this message. */
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Parser for {@code RegisterNodeManagerResponseProto}. Deprecated in generated
 * code in favour of the static {@code parser()} accessor.
 * NOTE(review): the generic type arguments had been stripped by HTML
 * extraction (raw {@code Parser}/{@code AbstractParser}); restored here to
 * the form protoc emits.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RegisterNodeManagerResponseProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RegisterNodeManagerResponseProto>() {
  @java.lang.Override
  public RegisterNodeManagerResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect the partial message.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/**
 * @return the preferred parser accessor for this message type.
 * NOTE(review): restored the {@code <RegisterNodeManagerResponseProto>} type
 * arguments that HTML extraction stripped from both Parser signatures.
 */
public static org.apache.hadoop.thirdparty.protobuf.Parser<RegisterNodeManagerResponseProto> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RegisterNodeManagerResponseProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Read-only view shared by UnRegisterNodeManagerRequestProto and its Builder.
public interface UnRegisterNodeManagerRequestProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.UnRegisterNodeManagerRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  /**
   * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
   * @return Whether the nodeId field is set.
   */
  boolean hasNodeId();
  /**
   * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
   * @return The nodeId.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
  /**
   * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
   */
  org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.UnRegisterNodeManagerRequestProto}
*/
public static final class UnRegisterNodeManagerRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UnRegisterNodeManagerRequestProto)
UnRegisterNodeManagerRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UnRegisterNodeManagerRequestProto.newBuilder() to construct.
private UnRegisterNodeManagerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private UnRegisterNodeManagerRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UnRegisterNodeManagerRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.Builder.class);
}
private int bitField0_;
public static final int NODE_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return Whether the nodeId field is set.
*/
@java.lang.Override
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return The nodeId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getNodeId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getNodeId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto) obj;
if (hasNodeId() != other.hasNodeId()) return false;
if (hasNodeId()) {
if (!getNodeId()
.equals(other.getNodeId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNodeId()) {
hash = (37 * hash) + NODE_ID_FIELD_NUMBER;
hash = (53 * hash) + getNodeId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.UnRegisterNodeManagerRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UnRegisterNodeManagerRequestProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getNodeIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.nodeId_ = nodeIdBuilder_ == null
? nodeId_
: nodeIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.getDefaultInstance()) return this;
if (other.hasNodeId()) {
mergeNodeId(other.getNodeId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getNodeIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return Whether the nodeId field is set.
*/
public boolean hasNodeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
* @return The nodeId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
if (nodeIdBuilder_ == null) {
return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
} else {
return nodeIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeId_ = value;
} else {
nodeIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder setNodeId(
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
if (nodeIdBuilder_ == null) {
nodeId_ = builderForValue.build();
} else {
nodeIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
if (nodeIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
nodeId_ != null &&
nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
getNodeIdBuilder().mergeFrom(value);
} else {
nodeId_ = value;
}
} else {
nodeIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public Builder clearNodeId() {
bitField0_ = (bitField0_ & ~0x00000001);
nodeId_ = null;
if (nodeIdBuilder_ != null) {
nodeIdBuilder_.dispose();
nodeIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getNodeIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
if (nodeIdBuilder_ != null) {
return nodeIdBuilder_.getMessageOrBuilder();
} else {
return nodeId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
}
}
/**
* optional .hadoop.yarn.NodeIdProto node_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>
getNodeIdFieldBuilder() {
if (nodeIdBuilder_ == null) {
nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
getNodeId(),
getParentForChildren(),
isClean());
nodeId_ = null;
}
return nodeIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UnRegisterNodeManagerRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UnRegisterNodeManagerRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public UnRegisterNodeManagerRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only view for UnRegisterNodeManagerResponseProto; the message has no
// fields, so this interface adds nothing beyond MessageOrBuilder.
public interface UnRegisterNodeManagerResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.UnRegisterNodeManagerResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.UnRegisterNodeManagerResponseProto}
*/
public static final class UnRegisterNodeManagerResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UnRegisterNodeManagerResponseProto)
UnRegisterNodeManagerResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UnRegisterNodeManagerResponseProto.newBuilder() to construct.
// NOTE(review): the wildcard type argument below had been stripped by HTML
// extraction (`Builder>`); restored to the form protoc emits.
private UnRegisterNodeManagerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private UnRegisterNodeManagerResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new UnRegisterNodeManagerResponseProto();
}
/** @return the protobuf descriptor for this message type. */
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerResponseProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.Builder.class);
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// The message has no declared fields; only unknown fields are serialized.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;  // return cached size if already computed
  size = 0;
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Equality and hashing consider only unknown fields (no declared fields exist).
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto) obj;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;  // hash is cached after first computation
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parseFrom overloads; all delegate to PARSER.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(byte[] data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
    byte[] data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Stream overloads route through GeneratedMessageV3 to wrap IOExceptions consistently.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// All builders originate from the default instance's toBuilder().
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom for the default instance: a fresh Builder is equivalent.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hadoop.yarn.UnRegisterNodeManagerResponseProto}
 *
 * Builder for the field-less UnRegisterNodeManagerResponseProto message.
 * Note: the generic bound {@code Builder<Builder>} was restored here; it had
 * been stripped to a raw type by the HTML extraction this file came from.
 */
public static final class Builder extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.UnRegisterNodeManagerResponseProto)
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProtoOrBuilder {
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerResponseProto_descriptor;
  }
  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerResponseProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.Builder.class);
  }
  // Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.newBuilder()
  private Builder() {
  }
  private Builder(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    return this;
  }
  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_UnRegisterNodeManagerResponseProto_descriptor;
  }
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto getDefaultInstanceForType() {
    return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.getDefaultInstance();
  }
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto build() {
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto buildPartial() {
    // No fields to copy; only unknown fields travel via the builder's state.
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto(this);
    onBuilt();
    return result;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto) {
      return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto other) {
    if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.getDefaultInstance()) return this;
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields: always initialized.
    return true;
  }
  @java.lang.Override
  public Builder mergeFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
        } // switch (tag)
      } // while (!done)
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:hadoop.yarn.UnRegisterNodeManagerResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UnRegisterNodeManagerResponseProto)
// Singleton default instance; also serves as the canonical "empty" message.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Shared parser instance. Deprecated in favor of {@link #parser()}.
 * The generic type arguments on Parser/AbstractParser were restored; the
 * HTML extraction this file came from had stripped them to raw types.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UnRegisterNodeManagerResponseProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UnRegisterNodeManagerResponseProto>() {
  @java.lang.Override
  public UnRegisterNodeManagerResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect partial data.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/** @return the shared parser for this message type (preferred over PARSER). */
public static org.apache.hadoop.thirdparty.protobuf.Parser<UnRegisterNodeManagerResponseProto> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UnRegisterNodeManagerResponseProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface NodeHeartbeatRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeHeartbeatRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.NodeStatusProto node_status = 1;
* @return Whether the nodeStatus field is set.
*/
boolean hasNodeStatus();
/**
* optional .hadoop.yarn.NodeStatusProto node_status = 1;
* @return The nodeStatus.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getNodeStatus();
/**
* optional .hadoop.yarn.NodeStatusProto node_status = 1;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder getNodeStatusOrBuilder();
/**
* optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
* @return Whether the lastKnownContainerTokenMasterKey field is set.
*/
boolean hasLastKnownContainerTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
* @return The lastKnownContainerTokenMasterKey.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getLastKnownContainerTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getLastKnownContainerTokenMasterKeyOrBuilder();
/**
* optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
* @return Whether the lastKnownNmTokenMasterKey field is set.
*/
boolean hasLastKnownNmTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
* @return The lastKnownNmTokenMasterKey.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getLastKnownNmTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getLastKnownNmTokenMasterKeyOrBuilder();
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;
* @return Whether the nodeLabels field is set.
*/
boolean hasNodeLabels();
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;
* @return The nodeLabels.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getNodeLabels();
/**
* optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder getNodeLabelsOrBuilder();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
java.util.List
getLogAggregationReportsForAppsList();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getLogAggregationReportsForApps(int index);
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
int getLogAggregationReportsForAppsCount();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
getLogAggregationReportsForAppsOrBuilderList();
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder getLogAggregationReportsForAppsOrBuilder(
int index);
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
java.util.List
getRegisteringCollectorsList();
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getRegisteringCollectors(int index);
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
int getRegisteringCollectorsCount();
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getRegisteringCollectorsOrBuilderList();
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getRegisteringCollectorsOrBuilder(
int index);
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;
* @return Whether the nodeAttributes field is set.
*/
boolean hasNodeAttributes();
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;
* @return The nodeAttributes.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getNodeAttributes();
/**
* optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder getNodeAttributesOrBuilder();
/**
* optional int64 tokenSequenceNo = 8;
* @return Whether the tokenSequenceNo field is set.
*/
boolean hasTokenSequenceNo();
/**
* optional int64 tokenSequenceNo = 8;
* @return The tokenSequenceNo.
*/
long getTokenSequenceNo();
}
/**
* Protobuf type {@code hadoop.yarn.NodeHeartbeatRequestProto}
*/
public static final class NodeHeartbeatRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodeHeartbeatRequestProto)
NodeHeartbeatRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeHeartbeatRequestProto.newBuilder() to construct.
// The wildcard bound Builder<?> was restored; the HTML extraction this file
// came from had mangled it to the invalid "Builder> builder".
private NodeHeartbeatRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; repeated fields start empty.
private NodeHeartbeatRequestProto() {
logAggregationReportsForApps_ = java.util.Collections.emptyList();
registeringCollectors_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NodeHeartbeatRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Lazily binds the descriptor's field accessors to this class and its Builder.
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.Builder.class);
}
// Presence bitmask for the optional fields (bit 0 = node_status, bit 1 = field 2, ...).
private int bitField0_;
public static final int NODE_STATUS_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto nodeStatus_;
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 * @return Whether the nodeStatus field is set.
 */
@java.lang.Override
public boolean hasNodeStatus() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 * @return The nodeStatus.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getNodeStatus() {
// Falls back to the default instance when unset, per protobuf convention.
return nodeStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder getNodeStatusOrBuilder() {
return nodeStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
}
public static final int LAST_KNOWN_CONTAINER_TOKEN_MASTER_KEY_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto lastKnownContainerTokenMasterKey_;
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 * @return Whether the lastKnownContainerTokenMasterKey field is set.
 */
@java.lang.Override
public boolean hasLastKnownContainerTokenMasterKey() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 * @return The lastKnownContainerTokenMasterKey.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getLastKnownContainerTokenMasterKey() {
// Default instance when unset, per protobuf convention.
return lastKnownContainerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownContainerTokenMasterKey_;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getLastKnownContainerTokenMasterKeyOrBuilder() {
return lastKnownContainerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownContainerTokenMasterKey_;
}
public static final int LAST_KNOWN_NM_TOKEN_MASTER_KEY_FIELD_NUMBER = 3;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto lastKnownNmTokenMasterKey_;
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 * @return Whether the lastKnownNmTokenMasterKey field is set.
 */
@java.lang.Override
public boolean hasLastKnownNmTokenMasterKey() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 * @return The lastKnownNmTokenMasterKey.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getLastKnownNmTokenMasterKey() {
return lastKnownNmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownNmTokenMasterKey_;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getLastKnownNmTokenMasterKeyOrBuilder() {
return lastKnownNmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownNmTokenMasterKey_;
}
public static final int NODELABELS_FIELD_NUMBER = 4;
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto nodeLabels_;
/**
 * optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;
 * @return Whether the nodeLabels field is set.
 */
@java.lang.Override
public boolean hasNodeLabels() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;
 * @return The nodeLabels.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getNodeLabels() {
return nodeLabels_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
}
/**
 * optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder getNodeLabelsOrBuilder() {
return nodeLabels_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
}
public static final int LOG_AGGREGATION_REPORTS_FOR_APPS_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private java.util.List logAggregationReportsForApps_;
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
@java.lang.Override
public java.util.List getLogAggregationReportsForAppsList() {
return logAggregationReportsForApps_;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
getLogAggregationReportsForAppsOrBuilderList() {
return logAggregationReportsForApps_;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
@java.lang.Override
public int getLogAggregationReportsForAppsCount() {
return logAggregationReportsForApps_.size();
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getLogAggregationReportsForApps(int index) {
return logAggregationReportsForApps_.get(index);
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder getLogAggregationReportsForAppsOrBuilder(
int index) {
return logAggregationReportsForApps_.get(index);
}
public static final int REGISTERING_COLLECTORS_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private java.util.List registeringCollectors_;
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
@java.lang.Override
public java.util.List getRegisteringCollectorsList() {
return registeringCollectors_;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getRegisteringCollectorsOrBuilderList() {
return registeringCollectors_;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
@java.lang.Override
public int getRegisteringCollectorsCount() {
return registeringCollectors_.size();
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getRegisteringCollectors(int index) {
return registeringCollectors_.get(index);
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getRegisteringCollectorsOrBuilder(
int index) {
return registeringCollectors_.get(index);
}
public static final int NODEATTRIBUTES_FIELD_NUMBER = 7;
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto nodeAttributes_;
/**
 * optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;
 * @return Whether the nodeAttributes field is set.
 */
@java.lang.Override
public boolean hasNodeAttributes() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
 * optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;
 * @return The nodeAttributes.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getNodeAttributes() {
// Default instance when unset, per protobuf convention.
return nodeAttributes_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
}
/**
 * optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder getNodeAttributesOrBuilder() {
return nodeAttributes_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
}
public static final int TOKENSEQUENCENO_FIELD_NUMBER = 8;
private long tokenSequenceNo_ = 0L;
/**
 * optional int64 tokenSequenceNo = 8;
 * @return Whether the tokenSequenceNo field is set.
 */
@java.lang.Override
public boolean hasTokenSequenceNo() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
 * optional int64 tokenSequenceNo = 8;
 * @return The tokenSequenceNo.
 */
@java.lang.Override
public long getTokenSequenceNo() {
return tokenSequenceNo_;
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// Recursively verify sub-messages that carry required fields of their own.
if (hasNodeStatus()) {
if (!getNodeStatus().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getRegisteringCollectorsCount(); i++) {
if (!getRegisteringCollectors(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasNodeAttributes()) {
if (!getNodeAttributes().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Emit only fields whose presence bit is set, in ascending field-number order.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getNodeStatus());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getLastKnownContainerTokenMasterKey());
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getLastKnownNmTokenMasterKey());
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(4, getNodeLabels());
}
for (int i = 0; i < logAggregationReportsForApps_.size(); i++) {
output.writeMessage(5, logAggregationReportsForApps_.get(i));
}
for (int i = 0; i < registeringCollectors_.size(); i++) {
output.writeMessage(6, registeringCollectors_.get(i));
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeMessage(7, getNodeAttributes());
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeInt64(8, tokenSequenceNo_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Memoized in the inherited memoizedSize field; mirrors writeTo() exactly.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getNodeStatus());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getLastKnownContainerTokenMasterKey());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(3, getLastKnownNmTokenMasterKey());
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, getNodeLabels());
}
for (int i = 0; i < logAggregationReportsForApps_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(5, logAggregationReportsForApps_.get(i));
}
for (int i = 0; i < registeringCollectors_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(6, registeringCollectors_.get(i));
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(7, getNodeAttributes());
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(8, tokenSequenceNo_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto) obj;
// Each optional field must agree on both presence and (when present) value.
if (hasNodeStatus() != other.hasNodeStatus()) return false;
if (hasNodeStatus()) {
if (!getNodeStatus()
.equals(other.getNodeStatus())) return false;
}
if (hasLastKnownContainerTokenMasterKey() != other.hasLastKnownContainerTokenMasterKey()) return false;
if (hasLastKnownContainerTokenMasterKey()) {
if (!getLastKnownContainerTokenMasterKey()
.equals(other.getLastKnownContainerTokenMasterKey())) return false;
}
if (hasLastKnownNmTokenMasterKey() != other.hasLastKnownNmTokenMasterKey()) return false;
if (hasLastKnownNmTokenMasterKey()) {
if (!getLastKnownNmTokenMasterKey()
.equals(other.getLastKnownNmTokenMasterKey())) return false;
}
if (hasNodeLabels() != other.hasNodeLabels()) return false;
if (hasNodeLabels()) {
if (!getNodeLabels()
.equals(other.getNodeLabels())) return false;
}
// Repeated fields compare element-wise via List.equals.
if (!getLogAggregationReportsForAppsList()
.equals(other.getLogAggregationReportsForAppsList())) return false;
if (!getRegisteringCollectorsList()
.equals(other.getRegisteringCollectorsList())) return false;
if (hasNodeAttributes() != other.hasNodeAttributes()) return false;
if (hasNodeAttributes()) {
if (!getNodeAttributes()
.equals(other.getNodeAttributes())) return false;
}
if (hasTokenSequenceNo() != other.hasTokenSequenceNo()) return false;
if (hasTokenSequenceNo()) {
if (getTokenSequenceNo()
!= other.getTokenSequenceNo()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash consistent with equals(): folds in the field number and value hash of
// every set field (and non-empty repeated list) with prime multipliers, then
// the unknown fields. Memoized in memoizedHashCode (0 means "not computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNodeStatus()) {
hash = (37 * hash) + NODE_STATUS_FIELD_NUMBER;
hash = (53 * hash) + getNodeStatus().hashCode();
}
if (hasLastKnownContainerTokenMasterKey()) {
hash = (37 * hash) + LAST_KNOWN_CONTAINER_TOKEN_MASTER_KEY_FIELD_NUMBER;
hash = (53 * hash) + getLastKnownContainerTokenMasterKey().hashCode();
}
if (hasLastKnownNmTokenMasterKey()) {
hash = (37 * hash) + LAST_KNOWN_NM_TOKEN_MASTER_KEY_FIELD_NUMBER;
hash = (53 * hash) + getLastKnownNmTokenMasterKey().hashCode();
}
if (hasNodeLabels()) {
hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabels().hashCode();
}
// Repeated fields contribute only when non-empty, via the whole list's hash.
if (getLogAggregationReportsForAppsCount() > 0) {
hash = (37 * hash) + LOG_AGGREGATION_REPORTS_FOR_APPS_FIELD_NUMBER;
hash = (53 * hash) + getLogAggregationReportsForAppsList().hashCode();
}
if (getRegisteringCollectorsCount() > 0) {
hash = (37 * hash) + REGISTERING_COLLECTORS_FIELD_NUMBER;
hash = (53 * hash) + getRegisteringCollectorsList().hashCode();
}
if (hasNodeAttributes()) {
hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getNodeAttributes().hashCode();
}
if (hasTokenSequenceNo()) {
hash = (37 * hash) + TOKENSEQUENCENO_FIELD_NUMBER;
// hashLong folds the 64-bit value into a 32-bit hash.
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getTokenSequenceNo());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. Each overload delegates to PARSER
// (byte-oriented inputs) or to the GeneratedMessageV3 helpers (stream-oriented
// inputs, which translate protobuf parse failures into IOException).
// "Delimited" variants read a varint length prefix before the message body.
// ---------------------------------------------------------------------------
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods. Builders are obtained from the shared
// DEFAULT_INSTANCE; toBuilder() avoids a mergeFrom when called on the
// default instance itself.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the fields of the given prototype.
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Internal factory used by the runtime to create a child builder tied to a
// parent message tree.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodeHeartbeatRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeHeartbeatRequestProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProtoOrBuilder {
// Descriptor plumbing: binds this Builder to the generated descriptor and
// its reflective field accessors.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested-field builders when the runtime flag
// alwaysUseFieldBuilders is set (used by the runtime for nested builders).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getNodeStatusFieldBuilder();
getLastKnownContainerTokenMasterKeyFieldBuilder();
getLastKnownNmTokenMasterKeyFieldBuilder();
getNodeLabelsFieldBuilder();
getLogAggregationReportsForAppsFieldBuilder();
getRegisteringCollectorsFieldBuilder();
getNodeAttributesFieldBuilder();
}
}
// Resets every field to its default: singular message fields are nulled and
// their nested builders disposed; repeated fields go back to an immutable
// empty list (or the list builder is cleared); presence bits in bitField0_
// are dropped.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
nodeStatus_ = null;
if (nodeStatusBuilder_ != null) {
nodeStatusBuilder_.dispose();
nodeStatusBuilder_ = null;
}
lastKnownContainerTokenMasterKey_ = null;
if (lastKnownContainerTokenMasterKeyBuilder_ != null) {
lastKnownContainerTokenMasterKeyBuilder_.dispose();
lastKnownContainerTokenMasterKeyBuilder_ = null;
}
lastKnownNmTokenMasterKey_ = null;
if (lastKnownNmTokenMasterKeyBuilder_ != null) {
lastKnownNmTokenMasterKeyBuilder_.dispose();
lastKnownNmTokenMasterKeyBuilder_ = null;
}
nodeLabels_ = null;
if (nodeLabelsBuilder_ != null) {
nodeLabelsBuilder_.dispose();
nodeLabelsBuilder_ = null;
}
if (logAggregationReportsForAppsBuilder_ == null) {
logAggregationReportsForApps_ = java.util.Collections.emptyList();
} else {
logAggregationReportsForApps_ = null;
logAggregationReportsForAppsBuilder_.clear();
}
// 0x10 is the builder-side "logAggregationReportsForApps list is mutable" bit.
bitField0_ = (bitField0_ & ~0x00000010);
if (registeringCollectorsBuilder_ == null) {
registeringCollectors_ = java.util.Collections.emptyList();
} else {
registeringCollectors_ = null;
registeringCollectorsBuilder_.clear();
}
// 0x20 is the builder-side "registeringCollectors list is mutable" bit.
bitField0_ = (bitField0_ & ~0x00000020);
nodeAttributes_ = null;
if (nodeAttributesBuilder_ != null) {
nodeAttributesBuilder_.dispose();
nodeAttributesBuilder_ = null;
}
tokenSequenceNo_ = 0L;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Assembles the immutable message: repeated fields first, then the bit-gated
// singular fields (skipped entirely when no bits are set).
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Transfers the two repeated fields into the result. When no list builder
// exists, the builder's backing list is frozen (made unmodifiable) and shared
// with the message; the mutability bit (0x10 / 0x20) is cleared so a later
// mutation on this builder forces a copy.
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto result) {
if (logAggregationReportsForAppsBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0)) {
logAggregationReportsForApps_ = java.util.Collections.unmodifiableList(logAggregationReportsForApps_);
bitField0_ = (bitField0_ & ~0x00000010);
}
result.logAggregationReportsForApps_ = logAggregationReportsForApps_;
} else {
result.logAggregationReportsForApps_ = logAggregationReportsForAppsBuilder_.build();
}
if (registeringCollectorsBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
registeringCollectors_ = java.util.Collections.unmodifiableList(registeringCollectors_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.registeringCollectors_ = registeringCollectors_;
} else {
result.registeringCollectors_ = registeringCollectorsBuilder_.build();
}
}
// Copies the bit-gated singular fields. Note the bit remapping: the builder
// tracks nodeAttributes at 0x40 and tokenSequenceNo at 0x80 (0x10/0x20 are
// taken by the repeated-field mutability bits above), while the message uses
// 0x10/0x20 for those same fields.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.nodeStatus_ = nodeStatusBuilder_ == null
? nodeStatus_
: nodeStatusBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.lastKnownContainerTokenMasterKey_ = lastKnownContainerTokenMasterKeyBuilder_ == null
? lastKnownContainerTokenMasterKey_
: lastKnownContainerTokenMasterKeyBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.lastKnownNmTokenMasterKey_ = lastKnownNmTokenMasterKeyBuilder_ == null
? lastKnownNmTokenMasterKey_
: lastKnownNmTokenMasterKeyBuilder_.build();
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.nodeLabels_ = nodeLabelsBuilder_ == null
? nodeLabels_
: nodeLabelsBuilder_.build();
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.nodeAttributes_ = nodeAttributesBuilder_ == null
? nodeAttributes_
: nodeAttributesBuilder_.build();
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.tokenSequenceNo_ = tokenSequenceNo_;
to_bitField0_ |= 0x00000020;
}
result.bitField0_ |= to_bitField0_;
}
// Boilerplate overrides that simply delegate to the GeneratedMessageV3.Builder
// superclass (generated so subclasses return the covariant Builder type).
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic merge: dispatches to the typed overload when possible, otherwise
// falls back to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: set fields from `other` override/merge into this builder.
// Singular message fields merge recursively; repeated fields are appended.
// For repeated fields, when this builder's list is still empty it adopts
// other's (immutable) list by reference and clears the mutability bit,
// deferring a copy until the next mutation.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.getDefaultInstance()) return this;
if (other.hasNodeStatus()) {
mergeNodeStatus(other.getNodeStatus());
}
if (other.hasLastKnownContainerTokenMasterKey()) {
mergeLastKnownContainerTokenMasterKey(other.getLastKnownContainerTokenMasterKey());
}
if (other.hasLastKnownNmTokenMasterKey()) {
mergeLastKnownNmTokenMasterKey(other.getLastKnownNmTokenMasterKey());
}
if (other.hasNodeLabels()) {
mergeNodeLabels(other.getNodeLabels());
}
if (logAggregationReportsForAppsBuilder_ == null) {
if (!other.logAggregationReportsForApps_.isEmpty()) {
if (logAggregationReportsForApps_.isEmpty()) {
logAggregationReportsForApps_ = other.logAggregationReportsForApps_;
bitField0_ = (bitField0_ & ~0x00000010);
} else {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.addAll(other.logAggregationReportsForApps_);
}
onChanged();
}
} else {
if (!other.logAggregationReportsForApps_.isEmpty()) {
if (logAggregationReportsForAppsBuilder_.isEmpty()) {
// List builder exists but is empty: discard it and adopt other's list,
// re-creating the builder only when the runtime forces field builders.
logAggregationReportsForAppsBuilder_.dispose();
logAggregationReportsForAppsBuilder_ = null;
logAggregationReportsForApps_ = other.logAggregationReportsForApps_;
bitField0_ = (bitField0_ & ~0x00000010);
logAggregationReportsForAppsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getLogAggregationReportsForAppsFieldBuilder() : null;
} else {
logAggregationReportsForAppsBuilder_.addAllMessages(other.logAggregationReportsForApps_);
}
}
}
if (registeringCollectorsBuilder_ == null) {
if (!other.registeringCollectors_.isEmpty()) {
if (registeringCollectors_.isEmpty()) {
registeringCollectors_ = other.registeringCollectors_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureRegisteringCollectorsIsMutable();
registeringCollectors_.addAll(other.registeringCollectors_);
}
onChanged();
}
} else {
if (!other.registeringCollectors_.isEmpty()) {
if (registeringCollectorsBuilder_.isEmpty()) {
registeringCollectorsBuilder_.dispose();
registeringCollectorsBuilder_ = null;
registeringCollectors_ = other.registeringCollectors_;
bitField0_ = (bitField0_ & ~0x00000020);
registeringCollectorsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRegisteringCollectorsFieldBuilder() : null;
} else {
registeringCollectorsBuilder_.addAllMessages(other.registeringCollectors_);
}
}
}
if (other.hasNodeAttributes()) {
mergeNodeAttributes(other.getNodeAttributes());
}
if (other.hasTokenSequenceNo()) {
setTokenSequenceNo(other.getTokenSequenceNo());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Initialization check: only fields whose message types carry required
// sub-fields are verified (nodeStatus, each registeringCollectors element,
// and nodeAttributes); the remaining fields need no check here.
@java.lang.Override
public final boolean isInitialized() {
if (hasNodeStatus()) {
if (!getNodeStatus().isInitialized()) {
return false;
}
}
for (int i = 0; i < getRegisteringCollectorsCount(); i++) {
if (!getRegisteringCollectors(i).isInitialized()) {
return false;
}
}
if (hasNodeAttributes()) {
if (!getNodeAttributes().isInitialized()) {
return false;
}
}
return true;
}
// Wire-format parse loop: reads tags until EOF (tag 0) or an end-group tag.
// Each case number is the field's wire tag (field number << 3 | wire type;
// e.g. 10 = field 1, length-delimited). Singular message fields parse into
// their nested builder and set the presence bit; repeated fields append a
// freshly parsed message. Unrecognized tags go to parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getNodeStatusFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getLastKnownContainerTokenMasterKeyFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
input.readMessage(
getLastKnownNmTokenMasterKeyFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
input.readMessage(
getNodeLabelsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000008;
break;
} // case 34
case 42: {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.PARSER,
extensionRegistry);
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
logAggregationReportsForApps_.add(m);
} else {
logAggregationReportsForAppsBuilder_.addMessage(m);
}
break;
} // case 42
case 50: {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.PARSER,
extensionRegistry);
if (registeringCollectorsBuilder_ == null) {
ensureRegisteringCollectorsIsMutable();
registeringCollectors_.add(m);
} else {
registeringCollectorsBuilder_.addMessage(m);
}
break;
} // case 50
case 58: {
input.readMessage(
getNodeAttributesFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000040;
break;
} // case 58
case 64: {
tokenSequenceNo_ = input.readInt64();
bitField0_ |= 0x00000080;
break;
} // case 64
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Presence/mutability bits for the builder's fields (see buildPartial0 for
// the mapping to the message's bits).
private int bitField0_;
// Field 1: node_status. Either the plain message (nodeStatus_) or, once a
// nested builder is requested, the SingleFieldBuilderV3 holds the state.
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto nodeStatus_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder> nodeStatusBuilder_;
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 * @return Whether the nodeStatus field is set.
 */
public boolean hasNodeStatus() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 * @return The nodeStatus (the default instance if unset).
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto getNodeStatus() {
if (nodeStatusBuilder_ == null) {
return nodeStatus_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
} else {
return nodeStatusBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 */
public Builder setNodeStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto value) {
if (nodeStatusBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nodeStatus_ = value;
} else {
nodeStatusBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 */
public Builder setNodeStatus(
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder builderForValue) {
if (nodeStatusBuilder_ == null) {
nodeStatus_ = builderForValue.build();
} else {
nodeStatusBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 * Merges into the existing value when one is already set (and is not the
 * default instance); otherwise replaces it.
 */
public Builder mergeNodeStatus(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto value) {
if (nodeStatusBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
nodeStatus_ != null &&
nodeStatus_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance()) {
getNodeStatusBuilder().mergeFrom(value);
} else {
nodeStatus_ = value;
}
} else {
nodeStatusBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 */
public Builder clearNodeStatus() {
bitField0_ = (bitField0_ & ~0x00000001);
nodeStatus_ = null;
if (nodeStatusBuilder_ != null) {
nodeStatusBuilder_.dispose();
nodeStatusBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 * Marks the field present and hands out the mutable nested builder.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder getNodeStatusBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getNodeStatusFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder getNodeStatusOrBuilder() {
if (nodeStatusBuilder_ != null) {
return nodeStatusBuilder_.getMessageOrBuilder();
} else {
return nodeStatus_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.getDefaultInstance() : nodeStatus_;
}
}
/**
 * optional .hadoop.yarn.NodeStatusProto node_status = 1;
 * Lazily creates the nested field builder, moving current state into it.
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder>
getNodeStatusFieldBuilder() {
if (nodeStatusBuilder_ == null) {
nodeStatusBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder>(
getNodeStatus(),
getParentForChildren(),
isClean());
nodeStatus_ = null;
}
return nodeStatusBuilder_;
}
// Field 2: last_known_container_token_master_key. Same lazy-builder pattern
// as node_status, with presence bit 0x00000002.
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto lastKnownContainerTokenMasterKey_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder> lastKnownContainerTokenMasterKeyBuilder_;
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 * @return Whether the lastKnownContainerTokenMasterKey field is set.
 */
public boolean hasLastKnownContainerTokenMasterKey() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 * @return The lastKnownContainerTokenMasterKey (the default instance if unset).
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getLastKnownContainerTokenMasterKey() {
if (lastKnownContainerTokenMasterKeyBuilder_ == null) {
return lastKnownContainerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownContainerTokenMasterKey_;
} else {
return lastKnownContainerTokenMasterKeyBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 */
public Builder setLastKnownContainerTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (lastKnownContainerTokenMasterKeyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
lastKnownContainerTokenMasterKey_ = value;
} else {
lastKnownContainerTokenMasterKeyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 */
public Builder setLastKnownContainerTokenMasterKey(
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder builderForValue) {
if (lastKnownContainerTokenMasterKeyBuilder_ == null) {
lastKnownContainerTokenMasterKey_ = builderForValue.build();
} else {
lastKnownContainerTokenMasterKeyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 * Merges into the existing value when already set and not the default;
 * otherwise replaces it.
 */
public Builder mergeLastKnownContainerTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (lastKnownContainerTokenMasterKeyBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
lastKnownContainerTokenMasterKey_ != null &&
lastKnownContainerTokenMasterKey_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance()) {
getLastKnownContainerTokenMasterKeyBuilder().mergeFrom(value);
} else {
lastKnownContainerTokenMasterKey_ = value;
}
} else {
lastKnownContainerTokenMasterKeyBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 */
public Builder clearLastKnownContainerTokenMasterKey() {
bitField0_ = (bitField0_ & ~0x00000002);
lastKnownContainerTokenMasterKey_ = null;
if (lastKnownContainerTokenMasterKeyBuilder_ != null) {
lastKnownContainerTokenMasterKeyBuilder_.dispose();
lastKnownContainerTokenMasterKeyBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 * Marks the field present and hands out the mutable nested builder.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder getLastKnownContainerTokenMasterKeyBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getLastKnownContainerTokenMasterKeyFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getLastKnownContainerTokenMasterKeyOrBuilder() {
if (lastKnownContainerTokenMasterKeyBuilder_ != null) {
return lastKnownContainerTokenMasterKeyBuilder_.getMessageOrBuilder();
} else {
return lastKnownContainerTokenMasterKey_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownContainerTokenMasterKey_;
}
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_container_token_master_key = 2;
 * Lazily creates the nested field builder, moving current state into it.
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>
getLastKnownContainerTokenMasterKeyFieldBuilder() {
if (lastKnownContainerTokenMasterKeyBuilder_ == null) {
lastKnownContainerTokenMasterKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>(
getLastKnownContainerTokenMasterKey(),
getParentForChildren(),
isClean());
lastKnownContainerTokenMasterKey_ = null;
}
return lastKnownContainerTokenMasterKeyBuilder_;
}
// Field 3: last_known_nm_token_master_key. Same lazy-builder pattern,
// presence bit 0x00000004. (Remaining accessors for this field continue
// below this chunk.)
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto lastKnownNmTokenMasterKey_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder> lastKnownNmTokenMasterKeyBuilder_;
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 * @return Whether the lastKnownNmTokenMasterKey field is set.
 */
public boolean hasLastKnownNmTokenMasterKey() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 * @return The lastKnownNmTokenMasterKey (the default instance if unset).
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getLastKnownNmTokenMasterKey() {
if (lastKnownNmTokenMasterKeyBuilder_ == null) {
return lastKnownNmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownNmTokenMasterKey_;
} else {
return lastKnownNmTokenMasterKeyBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 */
public Builder setLastKnownNmTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (lastKnownNmTokenMasterKeyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
lastKnownNmTokenMasterKey_ = value;
} else {
lastKnownNmTokenMasterKeyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 */
public Builder setLastKnownNmTokenMasterKey(
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder builderForValue) {
if (lastKnownNmTokenMasterKeyBuilder_ == null) {
lastKnownNmTokenMasterKey_ = builderForValue.build();
} else {
lastKnownNmTokenMasterKeyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
 * Merges into the existing value when already set and not the default;
 * otherwise replaces it.
 */
public Builder mergeLastKnownNmTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
if (lastKnownNmTokenMasterKeyBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
lastKnownNmTokenMasterKey_ != null &&
lastKnownNmTokenMasterKey_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance()) {
getLastKnownNmTokenMasterKeyBuilder().mergeFrom(value);
} else {
lastKnownNmTokenMasterKey_ = value;
}
} else {
lastKnownNmTokenMasterKeyBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
*/
public Builder clearLastKnownNmTokenMasterKey() {
bitField0_ = (bitField0_ & ~0x00000004);
lastKnownNmTokenMasterKey_ = null;
if (lastKnownNmTokenMasterKeyBuilder_ != null) {
lastKnownNmTokenMasterKeyBuilder_.dispose();
lastKnownNmTokenMasterKeyBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder getLastKnownNmTokenMasterKeyBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getLastKnownNmTokenMasterKeyFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getLastKnownNmTokenMasterKeyOrBuilder() {
if (lastKnownNmTokenMasterKeyBuilder_ != null) {
return lastKnownNmTokenMasterKeyBuilder_.getMessageOrBuilder();
} else {
return lastKnownNmTokenMasterKey_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : lastKnownNmTokenMasterKey_;
}
}
/**
* optional .hadoop.yarn.MasterKeyProto last_known_nm_token_master_key = 3;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>
getLastKnownNmTokenMasterKeyFieldBuilder() {
if (lastKnownNmTokenMasterKeyBuilder_ == null) {
lastKnownNmTokenMasterKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>(
getLastKnownNmTokenMasterKey(),
getParentForChildren(),
isClean());
lastKnownNmTokenMasterKey_ = null;
}
return lastKnownNmTokenMasterKeyBuilder_;
}
// ---- optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4 ----
// Message value; ignored once nodeLabelsBuilder_ has been created.
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto nodeLabels_;
// Lazily-created nested builder; when non-null it owns this field's value.
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder> nodeLabelsBuilder_;
/**
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 * @return Whether the nodeLabels field is set (presence bit 0x08 of bitField0_).
 */
public boolean hasNodeLabels() {
  return ((bitField0_ & 0x00000008) != 0);
}
/**
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 * @return The nodeLabels, or the default instance if unset.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto getNodeLabels() {
  if (nodeLabelsBuilder_ == null) {
    return nodeLabels_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
  } else {
    return nodeLabelsBuilder_.getMessage();
  }
}
/**
 * Sets the field to the given message.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 * @throws NullPointerException if value is null
 */
public Builder setNodeLabels(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto value) {
  if (nodeLabelsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    nodeLabels_ = value;
  } else {
    nodeLabelsBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * Sets the field from a builder (the builder is built immediately).
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 */
public Builder setNodeLabels(
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder builderForValue) {
  if (nodeLabelsBuilder_ == null) {
    nodeLabels_ = builderForValue.build();
  } else {
    nodeLabelsBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * Merges the given message into the field: if a non-default value is already
 * present, field-merge into it; otherwise replace it wholesale.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 */
public Builder mergeNodeLabels(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto value) {
  if (nodeLabelsBuilder_ == null) {
    if (((bitField0_ & 0x00000008) != 0) &&
      nodeLabels_ != null &&
      nodeLabels_ != org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance()) {
      getNodeLabelsBuilder().mergeFrom(value);
    } else {
      nodeLabels_ = value;
    }
  } else {
    nodeLabelsBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * Clears the field: drops the presence bit, the value, and any nested builder.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 */
public Builder clearNodeLabels() {
  bitField0_ = (bitField0_ & ~0x00000008);
  nodeLabels_ = null;
  if (nodeLabelsBuilder_ != null) {
    nodeLabelsBuilder_.dispose();
    nodeLabelsBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for the field; marks the field present.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder getNodeLabelsBuilder() {
  bitField0_ |= 0x00000008;
  onChanged();
  return getNodeLabelsFieldBuilder().getBuilder();
}
/**
 * Read-only view of the field without forcing builder creation.
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder getNodeLabelsOrBuilder() {
  if (nodeLabelsBuilder_ != null) {
    return nodeLabelsBuilder_.getMessageOrBuilder();
  } else {
    return nodeLabels_ == null ?
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.getDefaultInstance() : nodeLabels_;
  }
}
/**
 * Lazily creates the nested single-field builder for
 * <code>optional .hadoop.yarn.NodeLabelsProto nodeLabels = 4;</code>;
 * the plain field reference is cleared once the builder takes ownership.
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder>
    getNodeLabelsFieldBuilder() {
  if (nodeLabelsBuilder_ == null) {
    nodeLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeLabelsProtoOrBuilder>(
            getNodeLabels(),
            getParentForChildren(),
            isClean());
    nodeLabels_ = null;
  }
  return nodeLabelsBuilder_;
}
// ---- repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5 ----
// Backing list; starts as the shared immutable empty list and is copied to a
// mutable ArrayList on first modification (copy-on-write).  The generic type
// parameters below were lost in a text extraction of this generated file and
// are restored here so the declarations compile without raw-type warnings.
private java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto> logAggregationReportsForApps_ =
  java.util.Collections.emptyList();
/** Replaces the immutable empty list with a private mutable copy and sets presence bit 0x10. */
private void ensureLogAggregationReportsForAppsIsMutable() {
  if (!((bitField0_ & 0x00000010) != 0)) {
    logAggregationReportsForApps_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto>(logAggregationReportsForApps_);
    bitField0_ |= 0x00000010;
  }
}
// Lazily-created repeated-field builder; when non-null it owns the list.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder> logAggregationReportsForAppsBuilder_;
/**
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 * @return an unmodifiable view of the current messages (never null).
 * The element type parameter was lost in extraction and is restored here.
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto> getLogAggregationReportsForAppsList() {
  if (logAggregationReportsForAppsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(logAggregationReportsForApps_);
  } else {
    return logAggregationReportsForAppsBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 * @return the number of elements currently in the repeated field.
 */
public int getLogAggregationReportsForAppsCount() {
  if (logAggregationReportsForAppsBuilder_ == null) {
    return logAggregationReportsForApps_.size();
  } else {
    return logAggregationReportsForAppsBuilder_.getCount();
  }
}
/**
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 * @return the element at the given index.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getLogAggregationReportsForApps(int index) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    return logAggregationReportsForApps_.get(index);
  } else {
    return logAggregationReportsForAppsBuilder_.getMessage(index);
  }
}
/**
 * Replaces the element at the given index.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 * @throws NullPointerException if value is null
 */
public Builder setLogAggregationReportsForApps(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto value) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureLogAggregationReportsForAppsIsMutable();
    logAggregationReportsForApps_.set(index, value);
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * Replaces the element at the given index from a builder.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public Builder setLogAggregationReportsForApps(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder builderForValue) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    ensureLogAggregationReportsForAppsIsMutable();
    logAggregationReportsForApps_.set(index, builderForValue.build());
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends an element.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 * @throws NullPointerException if value is null
 */
public Builder addLogAggregationReportsForApps(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto value) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureLogAggregationReportsForAppsIsMutable();
    logAggregationReportsForApps_.add(value);
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.addMessage(value);
  }
  return this;
}
/**
 * Inserts an element at the given index.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 * @throws NullPointerException if value is null
 */
public Builder addLogAggregationReportsForApps(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto value) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureLogAggregationReportsForAppsIsMutable();
    logAggregationReportsForApps_.add(index, value);
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * Appends an element from a builder.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public Builder addLogAggregationReportsForApps(
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder builderForValue) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    ensureLogAggregationReportsForAppsIsMutable();
    logAggregationReportsForApps_.add(builderForValue.build());
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * Inserts an element built from a builder at the given index.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public Builder addLogAggregationReportsForApps(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder builderForValue) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    ensureLogAggregationReportsForAppsIsMutable();
    logAggregationReportsForApps_.add(index, builderForValue.build());
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
public Builder addAllLogAggregationReportsForApps(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto> values) {
if (logAggregationReportsForAppsBuilder_ == null) {
ensureLogAggregationReportsForAppsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, logAggregationReportsForApps_);
onChanged();
} else {
logAggregationReportsForAppsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * Removes every element and clears presence bit 0x10.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public Builder clearLogAggregationReportsForApps() {
  if (logAggregationReportsForAppsBuilder_ == null) {
    logAggregationReportsForApps_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000010);
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.clear();
  }
  return this;
}
/**
 * Removes the element at the given index.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public Builder removeLogAggregationReportsForApps(int index) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    ensureLogAggregationReportsForAppsIsMutable();
    logAggregationReportsForApps_.remove(index);
    onChanged();
  } else {
    logAggregationReportsForAppsBuilder_.remove(index);
  }
  return this;
}
/**
 * Returns a mutable builder for the element at the given index
 * (forces creation of the repeated-field builder).
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder getLogAggregationReportsForAppsBuilder(
    int index) {
  return getLogAggregationReportsForAppsFieldBuilder().getBuilder(index);
}
/**
 * Read-only view of the element at the given index without forcing builder creation.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder getLogAggregationReportsForAppsOrBuilder(
    int index) {
  if (logAggregationReportsForAppsBuilder_ == null) {
    return logAggregationReportsForApps_.get(index); } else {
    return logAggregationReportsForAppsBuilder_.getMessageOrBuilder(index);
  }
}
/**
* repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
getLogAggregationReportsForAppsOrBuilderList() {
if (logAggregationReportsForAppsBuilder_ != null) {
return logAggregationReportsForAppsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(logAggregationReportsForApps_);
}
}
/**
 * Appends a default-valued element and returns its mutable builder.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder addLogAggregationReportsForAppsBuilder() {
  return getLogAggregationReportsForAppsFieldBuilder().addBuilder(
      org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.getDefaultInstance());
}
/**
 * Inserts a default-valued element at the given index and returns its mutable builder.
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder addLogAggregationReportsForAppsBuilder(
    int index) {
  return getLogAggregationReportsForAppsFieldBuilder().addBuilder(
      index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>
 * @return mutable builders for every element (forces builder creation).
 * The element type parameter was lost in extraction and is restored here.
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder>
    getLogAggregationReportsForAppsBuilderList() {
  return getLogAggregationReportsForAppsFieldBuilder().getBuilderList();
}
/**
 * Lazily creates the repeated-field builder for
 * <code>repeated .hadoop.yarn.LogAggregationReportProto log_aggregation_reports_for_apps = 5;</code>.
 * The current list is seeded into the builder (flagged mutable only if presence
 * bit 0x10 is set) and the plain list reference is cleared, so the builder
 * becomes the single source of truth afterwards.
 */
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>
    getLogAggregationReportsForAppsFieldBuilder() {
  if (logAggregationReportsForAppsBuilder_ == null) {
    logAggregationReportsForAppsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder>(
            logAggregationReportsForApps_,
            ((bitField0_ & 0x00000010) != 0),
            getParentForChildren(),
            isClean());
    logAggregationReportsForApps_ = null;
  }
  return logAggregationReportsForAppsBuilder_;
}
// ---- repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6 ----
// Backing list; starts as the shared immutable empty list and is copied to a
// mutable ArrayList on first modification (copy-on-write).  The generic type
// parameters below were lost in a text extraction of this generated file and
// are restored here so the declarations compile without raw-type warnings.
private java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto> registeringCollectors_ =
  java.util.Collections.emptyList();
/** Replaces the immutable empty list with a private mutable copy and sets presence bit 0x20. */
private void ensureRegisteringCollectorsIsMutable() {
  if (!((bitField0_ & 0x00000020) != 0)) {
    registeringCollectors_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto>(registeringCollectors_);
    bitField0_ |= 0x00000020;
  }
}
// Lazily-created repeated-field builder; when non-null it owns the list.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder> registeringCollectorsBuilder_;
/**
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 * @return an unmodifiable view of the current messages (never null).
 * The element type parameter was lost in extraction and is restored here.
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto> getRegisteringCollectorsList() {
  if (registeringCollectorsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(registeringCollectors_);
  } else {
    return registeringCollectorsBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 * @return the number of elements currently in the repeated field.
 */
public int getRegisteringCollectorsCount() {
  if (registeringCollectorsBuilder_ == null) {
    return registeringCollectors_.size();
  } else {
    return registeringCollectorsBuilder_.getCount();
  }
}
/**
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 * @return the element at the given index.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getRegisteringCollectors(int index) {
  if (registeringCollectorsBuilder_ == null) {
    return registeringCollectors_.get(index);
  } else {
    return registeringCollectorsBuilder_.getMessage(index);
  }
}
/**
 * Replaces the element at the given index.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 * @throws NullPointerException if value is null
 */
public Builder setRegisteringCollectors(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
  if (registeringCollectorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureRegisteringCollectorsIsMutable();
    registeringCollectors_.set(index, value);
    onChanged();
  } else {
    registeringCollectorsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * Replaces the element at the given index from a builder.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public Builder setRegisteringCollectors(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
  if (registeringCollectorsBuilder_ == null) {
    ensureRegisteringCollectorsIsMutable();
    registeringCollectors_.set(index, builderForValue.build());
    onChanged();
  } else {
    registeringCollectorsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends an element.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 * @throws NullPointerException if value is null
 */
public Builder addRegisteringCollectors(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
  if (registeringCollectorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureRegisteringCollectorsIsMutable();
    registeringCollectors_.add(value);
    onChanged();
  } else {
    registeringCollectorsBuilder_.addMessage(value);
  }
  return this;
}
/**
 * Inserts an element at the given index.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 * @throws NullPointerException if value is null
 */
public Builder addRegisteringCollectors(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
  if (registeringCollectorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureRegisteringCollectorsIsMutable();
    registeringCollectors_.add(index, value);
    onChanged();
  } else {
    registeringCollectorsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * Appends an element from a builder.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public Builder addRegisteringCollectors(
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
  if (registeringCollectorsBuilder_ == null) {
    ensureRegisteringCollectorsIsMutable();
    registeringCollectors_.add(builderForValue.build());
    onChanged();
  } else {
    registeringCollectorsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * Inserts an element built from a builder at the given index.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public Builder addRegisteringCollectors(
    int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
  if (registeringCollectorsBuilder_ == null) {
    ensureRegisteringCollectorsIsMutable();
    registeringCollectors_.add(index, builderForValue.build());
    onChanged();
  } else {
    registeringCollectorsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
public Builder addAllRegisteringCollectors(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto> values) {
if (registeringCollectorsBuilder_ == null) {
ensureRegisteringCollectorsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, registeringCollectors_);
onChanged();
} else {
registeringCollectorsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * Removes every element and clears presence bit 0x20.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public Builder clearRegisteringCollectors() {
  if (registeringCollectorsBuilder_ == null) {
    registeringCollectors_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000020);
    onChanged();
  } else {
    registeringCollectorsBuilder_.clear();
  }
  return this;
}
/**
 * Removes the element at the given index.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public Builder removeRegisteringCollectors(int index) {
  if (registeringCollectorsBuilder_ == null) {
    ensureRegisteringCollectorsIsMutable();
    registeringCollectors_.remove(index);
    onChanged();
  } else {
    registeringCollectorsBuilder_.remove(index);
  }
  return this;
}
/**
 * Returns a mutable builder for the element at the given index
 * (forces creation of the repeated-field builder).
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder getRegisteringCollectorsBuilder(
    int index) {
  return getRegisteringCollectorsFieldBuilder().getBuilder(index);
}
/**
 * Read-only view of the element at the given index without forcing builder creation.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getRegisteringCollectorsOrBuilder(
    int index) {
  if (registeringCollectorsBuilder_ == null) {
    return registeringCollectors_.get(index); } else {
    return registeringCollectorsBuilder_.getMessageOrBuilder(index);
  }
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getRegisteringCollectorsOrBuilderList() {
if (registeringCollectorsBuilder_ != null) {
return registeringCollectorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(registeringCollectors_);
}
}
/**
 * Appends a default-valued element and returns its mutable builder.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder addRegisteringCollectorsBuilder() {
  return getRegisteringCollectorsFieldBuilder().addBuilder(
      org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance());
}
/**
 * Inserts a default-valued element at the given index and returns its mutable builder.
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder addRegisteringCollectorsBuilder(
    int index) {
  return getRegisteringCollectorsFieldBuilder().addBuilder(
      index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>
 * @return mutable builders for every element (forces builder creation).
 * The element type parameter was lost in extraction and is restored here.
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder>
    getRegisteringCollectorsBuilderList() {
  return getRegisteringCollectorsFieldBuilder().getBuilderList();
}
/**
 * Lazily creates the repeated-field builder for
 * <code>repeated .hadoop.yarn.AppCollectorDataProto registering_collectors = 6;</code>.
 * The current list is seeded into the builder (flagged mutable only if presence
 * bit 0x20 is set) and the plain list reference is cleared, so the builder
 * becomes the single source of truth afterwards.
 */
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
    getRegisteringCollectorsFieldBuilder() {
  if (registeringCollectorsBuilder_ == null) {
    registeringCollectorsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>(
            registeringCollectors_,
            ((bitField0_ & 0x00000020) != 0),
            getParentForChildren(),
            isClean());
    registeringCollectors_ = null;
  }
  return registeringCollectorsBuilder_;
}
// ---- optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7 ----
// Message value; ignored once nodeAttributesBuilder_ has been created.
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto nodeAttributes_;
// Lazily-created nested builder; when non-null it owns this field's value.
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder> nodeAttributesBuilder_;
/**
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 * @return Whether the nodeAttributes field is set (presence bit 0x40 of bitField0_).
 */
public boolean hasNodeAttributes() {
  return ((bitField0_ & 0x00000040) != 0);
}
/**
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 * @return The nodeAttributes, or the default instance if unset.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto getNodeAttributes() {
  if (nodeAttributesBuilder_ == null) {
    return nodeAttributes_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
  } else {
    return nodeAttributesBuilder_.getMessage();
  }
}
/**
 * Sets the field to the given message.
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 * @throws NullPointerException if value is null
 */
public Builder setNodeAttributes(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto value) {
  if (nodeAttributesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    nodeAttributes_ = value;
  } else {
    nodeAttributesBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000040;
  onChanged();
  return this;
}
/**
 * Sets the field from a builder (the builder is built immediately).
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 */
public Builder setNodeAttributes(
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder builderForValue) {
  if (nodeAttributesBuilder_ == null) {
    nodeAttributes_ = builderForValue.build();
  } else {
    nodeAttributesBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000040;
  onChanged();
  return this;
}
/**
 * Merges the given message into the field: if a non-default value is already
 * present, field-merge into it; otherwise replace it wholesale.
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 */
public Builder mergeNodeAttributes(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto value) {
  if (nodeAttributesBuilder_ == null) {
    if (((bitField0_ & 0x00000040) != 0) &&
      nodeAttributes_ != null &&
      nodeAttributes_ != org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance()) {
      getNodeAttributesBuilder().mergeFrom(value);
    } else {
      nodeAttributes_ = value;
    }
  } else {
    nodeAttributesBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000040;
  onChanged();
  return this;
}
/**
 * Clears the field: drops the presence bit, the value, and any nested builder.
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 */
public Builder clearNodeAttributes() {
  bitField0_ = (bitField0_ & ~0x00000040);
  nodeAttributes_ = null;
  if (nodeAttributesBuilder_ != null) {
    nodeAttributesBuilder_.dispose();
    nodeAttributesBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for the field; marks the field present.
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder getNodeAttributesBuilder() {
  bitField0_ |= 0x00000040;
  onChanged();
  return getNodeAttributesFieldBuilder().getBuilder();
}
/**
 * Read-only view of the field without forcing builder creation.
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder getNodeAttributesOrBuilder() {
  if (nodeAttributesBuilder_ != null) {
    return nodeAttributesBuilder_.getMessageOrBuilder();
  } else {
    return nodeAttributes_ == null ?
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.getDefaultInstance() : nodeAttributes_;
  }
}
/**
 * Lazily creates the nested single-field builder for
 * <code>optional .hadoop.yarn.NodeAttributesProto nodeAttributes = 7;</code>;
 * the plain field reference is cleared once the builder takes ownership.
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder>
    getNodeAttributesFieldBuilder() {
  if (nodeAttributesBuilder_ == null) {
    nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeAttributesProtoOrBuilder>(
            getNodeAttributes(),
            getParentForChildren(),
            isClean());
    nodeAttributes_ = null;
  }
  return nodeAttributesBuilder_;
}
// ---- optional int64 tokenSequenceNo = 8 ----
// Scalar field; defaults to 0 when unset.
private long tokenSequenceNo_ ;
/**
 * <code>optional int64 tokenSequenceNo = 8;</code>
 * @return Whether the tokenSequenceNo field is set (presence bit 0x80 of bitField0_).
 */
@java.lang.Override
public boolean hasTokenSequenceNo() {
  return ((bitField0_ & 0x00000080) != 0);
}
/**
 * <code>optional int64 tokenSequenceNo = 8;</code>
 * @return The tokenSequenceNo (0 when unset).
 */
@java.lang.Override
public long getTokenSequenceNo() {
  return tokenSequenceNo_;
}
/**
 * <code>optional int64 tokenSequenceNo = 8;</code>
 * @param value The tokenSequenceNo to set.
 * @return This builder for chaining.
 */
public Builder setTokenSequenceNo(long value) {
  tokenSequenceNo_ = value;
  bitField0_ |= 0x00000080;
  onChanged();
  return this;
}
/**
 * Resets the field to its default (0) and clears the presence bit.
 * <code>optional int64 tokenSequenceNo = 8;</code>
 * @return This builder for chaining.
 */
public Builder clearTokenSequenceNo() {
  bitField0_ = (bitField0_ & ~0x00000080);
  tokenSequenceNo_ = 0L;
  onChanged();
  return this;
}
// Unknown-field handling is delegated unchanged to the protobuf base builder;
// these overrides exist only to narrow the return type to this Builder.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeHeartbeatRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeHeartbeatRequestProto)
// Class-scope singletons for NodeHeartbeatRequestProto: the immutable default
// instance and the (deprecated, use parser()) static PARSER.
// FIX(review): the Parser/AbstractParser type arguments had been stripped
// (raw types); restored to <NodeHeartbeatRequestProto> per standard protoc
// Java codegen output.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeHeartbeatRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeHeartbeatRequestProto>() {
@java.lang.Override
public NodeHeartbeatRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
// Attach the partially-parsed message so callers can inspect it.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeHeartbeatRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NodeHeartbeatRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor contract for hadoop.yarn.LogAggregationReportProto,
// implemented by both the message and its Builder.
public interface LogAggregationReportProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.LogAggregationReportProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
boolean hasApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @return Whether the logAggregationStatus field is set.
*/
boolean hasLogAggregationStatus();
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @return The logAggregationStatus.
*/
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus();
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return Whether the diagnostics field is set.
*/
boolean hasDiagnostics();
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return The diagnostics.
*/
java.lang.String getDiagnostics();
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return The bytes for diagnostics.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes();
}
/**
* Protobuf type {@code hadoop.yarn.LogAggregationReportProto}
*/
public static final class LogAggregationReportProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.LogAggregationReportProto)
LogAggregationReportProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use LogAggregationReportProto.newBuilder() to construct.
private LogAggregationReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private LogAggregationReportProto() {
logAggregationStatus_ = 1;
diagnostics_ = "N/A";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new LogAggregationReportProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_LogAggregationReportProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_LogAggregationReportProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder.class);
}
// Field storage and read accessors for the immutable message.  Presence bits:
// 0x1 application_id, 0x2 log_aggregation_status, 0x4 diagnostics.
private int bitField0_;
public static final int APPLICATION_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
@java.lang.Override
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
public static final int LOG_AGGREGATION_STATUS_FIELD_NUMBER = 2;
// Stored as the raw enum number; default number is 1.
private int logAggregationStatus_ = 1;
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @return Whether the logAggregationStatus field is set.
*/
@java.lang.Override public boolean hasLogAggregationStatus() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @return The logAggregationStatus.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus() {
// forNumber() returns null for unrecognized numbers; fall back to LOG_DISABLED.
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(logAggregationStatus_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.LOG_DISABLED : result;
}
public static final int DIAGNOSTICS_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; decoded form is cached on first read.
@SuppressWarnings("serial")
private volatile java.lang.Object diagnostics_ = "N/A";
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return Whether the diagnostics field is set.
*/
@java.lang.Override
public boolean hasDiagnostics() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return The diagnostics.
*/
@java.lang.Override
public java.lang.String getDiagnostics() {
java.lang.Object ref = diagnostics_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
diagnostics_ = s;
}
return s;
}
}
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return The bytes for diagnostics.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes() {
java.lang.Object ref = diagnostics_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded form for subsequent byte-level access.
diagnostics_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
// Serialization: all three fields are optional with no required sub-fields,
// so isInitialized() is trivially true (result memoized in a byte flag).
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Writes only the fields whose presence bit is set, in field-number order,
// then any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getApplicationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, logAggregationStatus_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnostics_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte length; must
// mirror writeTo() exactly.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getApplicationId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, logAggregationStatus_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnostics_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality: same presence bits, equal field values, and equal
// unknown fields.  Field values are only compared when present on both sides.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto) obj;
if (hasApplicationId() != other.hasApplicationId()) return false;
if (hasApplicationId()) {
if (!getApplicationId()
.equals(other.getApplicationId())) return false;
}
if (hasLogAggregationStatus() != other.hasLogAggregationStatus()) return false;
if (hasLogAggregationStatus()) {
// Enums compare by raw number so unrecognized values still compare correctly.
if (logAggregationStatus_ != other.logAggregationStatus_) return false;
}
if (hasDiagnostics() != other.hasDiagnostics()) return false;
if (hasDiagnostics()) {
if (!getDiagnostics()
.equals(other.getDiagnostics())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash mixes only present fields (keyed by field number), consistent with
// equals(); result is memoized since the message is immutable.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasApplicationId()) {
hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
hash = (53 * hash) + getApplicationId().hashCode();
}
if (hasLogAggregationStatus()) {
hash = (37 * hash) + LOG_AGGREGATION_STATUS_FIELD_NUMBER;
hash = (53 * hash) + logAggregationStatus_;
}
if (hasDiagnostics()) {
hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
hash = (53 * hash) + getDiagnostics().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input type
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite.  All delegate to PARSER.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() avoids a redundant merge when invoked
// on the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.LogAggregationReportProto}
*/
// FIX(review): the superclass type argument had been stripped, leaving the
// raw `GeneratedMessageV3.Builder`; restored `Builder<Builder>` per standard
// protoc Java codegen output (self-referential builder type).
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.LogAggregationReportProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_LogAggregationReportProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_LogAggregationReportProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested field builders only when the runtime requests it
// (alwaysUseFieldBuilders is a test/debug hook in the protobuf runtime).
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getApplicationIdFieldBuilder();
}
}
// Resets every field to its proto default and clears all presence bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
logAggregationStatus_ = 1;
diagnostics_ = "N/A";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_LogAggregationReportProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.getDefaultInstance();
}
// build() enforces initialization; buildPartial() does not.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies each set field (and its presence bit) from the builder into the
// freshly constructed immutable message.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.applicationId_ = applicationIdBuilder_ == null
? applicationId_
: applicationIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.logAggregationStatus_ = logAggregationStatus_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.diagnostics_ = diagnostics_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
// Reflective field-manipulation overrides; all delegate unchanged to the
// protobuf base builder, narrowing only the return type to this Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic dispatch: use the typed merge when possible, otherwise fall back to
// the reflective merge in the base class.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: copies only fields present on `other`, overwriting scalars and
// recursively merging the application_id sub-message.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto.getDefaultInstance()) return this;
if (other.hasApplicationId()) {
mergeApplicationId(other.getApplicationId());
}
if (other.hasLogAggregationStatus()) {
setLogAggregationStatus(other.getLogAggregationStatus());
}
if (other.hasDiagnostics()) {
// Copy the raw String/ByteString object directly to avoid a decode.
diagnostics_ = other.diagnostics_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop: dispatches on the tag (field number << 3 | wire
// type) until end of input or an end-group tag.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
// Field 1, length-delimited: application_id sub-message.
input.readMessage(
getApplicationIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
// Field 2, varint: log_aggregation_status.  Unrecognized enum
// numbers are preserved as unknown fields (proto2 semantics).
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto tmpValue =
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(2, tmpRaw);
} else {
logAggregationStatus_ = tmpRaw;
bitField0_ |= 0x00000002;
}
break;
} // case 16
case 26: {
// Field 3, length-delimited: diagnostics (kept as raw bytes; decoded
// lazily on first getDiagnostics()).
diagnostics_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder-side storage and accessors for the optional message field
// `application_id` (field 1); presence bit 0x00000001.  The nested
// SingleFieldBuilderV3 is created lazily.
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return Whether the applicationId field is set.
*/
public boolean hasApplicationId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
* @return The applicationId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
if (applicationIdBuilder_ == null) {
return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
} else {
return applicationIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
applicationId_ = value;
} else {
applicationIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder setApplicationId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationIdBuilder_ == null) {
applicationId_ = builderForValue.build();
} else {
applicationIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationIdBuilder_ == null) {
// Merge field-by-field only when a non-default value is already set;
// otherwise simply adopt the incoming message.
if (((bitField0_ & 0x00000001) != 0) &&
applicationId_ != null &&
applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getApplicationIdBuilder().mergeFrom(value);
} else {
applicationId_ = value;
}
} else {
applicationIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public Builder clearApplicationId() {
bitField0_ = (bitField0_ & ~0x00000001);
applicationId_ = null;
if (applicationIdBuilder_ != null) {
applicationIdBuilder_.dispose();
applicationIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getApplicationIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
if (applicationIdBuilder_ != null) {
return applicationIdBuilder_.getMessageOrBuilder();
} else {
return applicationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto application_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationIdFieldBuilder() {
if (applicationIdBuilder_ == null) {
applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getApplicationId(),
getParentForChildren(),
isClean());
// Ownership of the field moves into the nested builder; drop the plain copy.
applicationId_ = null;
}
return applicationIdBuilder_;
}
// Builder-side accessors for the optional enum field `log_aggregation_status`
// (field 2); stored as the raw enum number, presence bit 0x00000002.
private int logAggregationStatus_ = 1;
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @return Whether the logAggregationStatus field is set.
*/
@java.lang.Override public boolean hasLogAggregationStatus() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @return The logAggregationStatus.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus() {
// forNumber() returns null for unrecognized numbers; fall back to LOG_DISABLED.
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(logAggregationStatus_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.LOG_DISABLED : result;
}
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @param value The logAggregationStatus to set.
* @return This builder for chaining.
*/
public Builder setLogAggregationStatus(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
logAggregationStatus_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 2;
* @return This builder for chaining.
*/
public Builder clearLogAggregationStatus() {
// Clear the presence bit and restore the default enum number (1).
bitField0_ = (bitField0_ & ~0x00000002);
logAggregationStatus_ = 1;
onChanged();
return this;
}
// Builder-side accessors for the optional string field `diagnostics`
// (field 3, default "N/A"); presence bit 0x00000004.  The field holds either
// a String or a ByteString, decoded/encoded lazily and cached.
private java.lang.Object diagnostics_ = "N/A";
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return Whether the diagnostics field is set.
*/
public boolean hasDiagnostics() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return The diagnostics.
*/
public java.lang.String getDiagnostics() {
java.lang.Object ref = diagnostics_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
diagnostics_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return The bytes for diagnostics.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes() {
java.lang.Object ref = diagnostics_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded form for subsequent byte-level access.
diagnostics_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string diagnostics = 3 [default = "N/A"];
* @param value The diagnostics to set.
* @return This builder for chaining.
*/
public Builder setDiagnostics(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
diagnostics_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string diagnostics = 3 [default = "N/A"];
* @return This builder for chaining.
*/
public Builder clearDiagnostics() {
// Restore the declared proto default ("N/A") and clear the presence bit.
diagnostics_ = getDefaultInstance().getDiagnostics();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string diagnostics = 3 [default = "N/A"];
* @param value The bytes for diagnostics to set.
* @return This builder for chaining.
*/
public Builder setDiagnosticsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
diagnostics_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
// Unknown-field handling is delegated unchanged to the protobuf base builder;
// these overrides exist only to narrow the return type to this Builder.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.LogAggregationReportProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.LogAggregationReportProto)
// Shared immutable singleton representing a LogAggregationReportProto with
// no fields set; protobuf messages are immutable so one instance suffices.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto();
}
// Accessor for the singleton default instance.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Wire-format parser for {@code LogAggregationReportProto}. Deprecated in
 * generated code; callers should prefer {@link #parser()}.
 * Note: the {@code Parser}/{@code AbstractParser} generic type arguments,
 * lost when this file was extracted, are restored here.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LogAggregationReportProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LogAggregationReportProto>() {
  @java.lang.Override
  public LogAggregationReportProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect partial data.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      // Wrap plain I/O failures in the protobuf exception type callers expect.
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/**
 * @return The parser for this message type (preferred over {@link #PARSER}).
 */
public static org.apache.hadoop.thirdparty.protobuf.Parser<LogAggregationReportProto> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<LogAggregationReportProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.LogAggregationReportProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
public interface NodeHeartbeatResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeHeartbeatResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional int32 response_id = 1;
* @return Whether the responseId field is set.
*/
boolean hasResponseId();
/**
* optional int32 response_id = 1;
* @return The responseId.
*/
int getResponseId();
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
* @return Whether the containerTokenMasterKey field is set.
*/
boolean hasContainerTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
* @return The containerTokenMasterKey.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getContainerTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getContainerTokenMasterKeyOrBuilder();
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
* @return Whether the nmTokenMasterKey field is set.
*/
boolean hasNmTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
* @return The nmTokenMasterKey.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getNmTokenMasterKey();
/**
* optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getNmTokenMasterKeyOrBuilder();
/**
* optional .hadoop.yarn.NodeActionProto nodeAction = 4;
* @return Whether the nodeAction field is set.
*/
boolean hasNodeAction();
/**
* optional .hadoop.yarn.NodeActionProto nodeAction = 4;
* @return The nodeAction.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto getNodeAction();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
java.util.List
getContainersToCleanupList();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainersToCleanup(int index);
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
int getContainersToCleanupCount();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToCleanupOrBuilderList();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainersToCleanupOrBuilder(
int index);
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
java.util.List
getApplicationsToCleanupList();
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationsToCleanup(int index);
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
int getApplicationsToCleanupCount();
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationsToCleanupOrBuilderList();
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationsToCleanupOrBuilder(
int index);
/**
* optional int64 nextHeartBeatInterval = 7;
* @return Whether the nextHeartBeatInterval field is set.
*/
boolean hasNextHeartBeatInterval();
/**
* optional int64 nextHeartBeatInterval = 7;
* @return The nextHeartBeatInterval.
*/
long getNextHeartBeatInterval();
/**
* optional string diagnostics_message = 8;
* @return Whether the diagnosticsMessage field is set.
*/
boolean hasDiagnosticsMessage();
/**
* optional string diagnostics_message = 8;
* @return The diagnosticsMessage.
*/
java.lang.String getDiagnosticsMessage();
/**
* optional string diagnostics_message = 8;
* @return The bytes for diagnosticsMessage.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsMessageBytes();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
java.util.List
getContainersToBeRemovedFromNmList();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainersToBeRemovedFromNm(int index);
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
int getContainersToBeRemovedFromNmCount();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToBeRemovedFromNmOrBuilderList();
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainersToBeRemovedFromNmOrBuilder(
int index);
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
java.util.List
getSystemCredentialsForAppsList();
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto getSystemCredentialsForApps(int index);
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
int getSystemCredentialsForAppsCount();
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder>
getSystemCredentialsForAppsOrBuilderList();
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder getSystemCredentialsForAppsOrBuilder(
int index);
/**
* optional bool areNodeLabelsAcceptedByRM = 11 [default = false];
* @return Whether the areNodeLabelsAcceptedByRM field is set.
*/
boolean hasAreNodeLabelsAcceptedByRM();
/**
* optional bool areNodeLabelsAcceptedByRM = 11 [default = false];
* @return The areNodeLabelsAcceptedByRM.
*/
boolean getAreNodeLabelsAcceptedByRM();
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
java.util.List
getContainersToDecreaseList();
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersToDecrease(int index);
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
int getContainersToDecreaseCount();
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToDecreaseOrBuilderList();
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersToDecreaseOrBuilder(
int index);
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
java.util.List
getContainersToSignalList();
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getContainersToSignal(int index);
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
int getContainersToSignalCount();
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder>
getContainersToSignalOrBuilderList();
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder getContainersToSignalOrBuilder(
int index);
/**
* optional .hadoop.yarn.ResourceProto resource = 14;
* @return Whether the resource field is set.
*/
boolean hasResource();
/**
* optional .hadoop.yarn.ResourceProto resource = 14;
* @return The resource.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
/**
* optional .hadoop.yarn.ResourceProto resource = 14;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();
/**
* optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;
* @return Whether the containerQueuingLimit field is set.
*/
boolean hasContainerQueuingLimit();
/**
* optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;
* @return The containerQueuingLimit.
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto getContainerQueuingLimit();
/**
* optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProtoOrBuilder getContainerQueuingLimitOrBuilder();
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
java.util.List
getAppCollectorsList();
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getAppCollectors(int index);
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
int getAppCollectorsCount();
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsOrBuilderList();
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getAppCollectorsOrBuilder(
int index);
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
java.util.List
getContainersToUpdateList();
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersToUpdate(int index);
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
int getContainersToUpdateCount();
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToUpdateOrBuilderList();
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersToUpdateOrBuilder(
int index);
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @return Whether the areNodeAttributesAcceptedByRM field is set.
*/
boolean hasAreNodeAttributesAcceptedByRM();
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @return The areNodeAttributesAcceptedByRM.
*/
boolean getAreNodeAttributesAcceptedByRM();
/**
* optional int64 tokenSequenceNo = 19;
* @return Whether the tokenSequenceNo field is set.
*/
boolean hasTokenSequenceNo();
/**
* optional int64 tokenSequenceNo = 19;
* @return The tokenSequenceNo.
*/
long getTokenSequenceNo();
}
/**
* Protobuf type {@code hadoop.yarn.NodeHeartbeatResponseProto}
*/
public static final class NodeHeartbeatResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodeHeartbeatResponseProto)
NodeHeartbeatResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeHeartbeatResponseProto.newBuilder() to construct.
// (The wildcard type argument on Builder, stripped in extraction, is restored.)
private NodeHeartbeatResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor backing the default instance: the enum field defaults to
// numeric value 0 and every repeated field starts as an immutable empty list.
private NodeHeartbeatResponseProto() {
  nodeAction_ = 0;
  containersToCleanup_ = java.util.Collections.emptyList();
  applicationsToCleanup_ = java.util.Collections.emptyList();
  diagnosticsMessage_ = "";
  containersToBeRemovedFromNm_ = java.util.Collections.emptyList();
  systemCredentialsForApps_ = java.util.Collections.emptyList();
  containersToDecrease_ = java.util.Collections.emptyList();
  containersToSignal_ = java.util.Collections.emptyList();
  appCollectors_ = java.util.Collections.emptyList();
  containersToUpdate_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new NodeHeartbeatResponseProto();
}
// Message descriptor used for reflection-based field access.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatResponseProto_descriptor;
}
// Binds the descriptor to this message class and its Builder for the
// generated reflection machinery.
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatResponseProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.Builder.class);
}
// Tracks which optional fields are explicitly set (one bit per field).
private int bitField0_;
public static final int RESPONSE_ID_FIELD_NUMBER = 1;
private int responseId_ = 0;
/**
 * <code>optional int32 response_id = 1;</code>
 * @return Whether the responseId field is set.
 */
@java.lang.Override
public boolean hasResponseId() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <code>optional int32 response_id = 1;</code>
 * @return The responseId.
 */
@java.lang.Override
public int getResponseId() {
  return responseId_;
}
public static final int CONTAINER_TOKEN_MASTER_KEY_FIELD_NUMBER = 2;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto containerTokenMasterKey_;
/**
 * <code>optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;</code>
 * @return Whether the containerTokenMasterKey field is set.
 */
@java.lang.Override
public boolean hasContainerTokenMasterKey() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <code>optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;</code>
 * @return The containerTokenMasterKey, or the default instance when unset.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getContainerTokenMasterKey() {
  return containerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
}
/**
 * <code>optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;</code>
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getContainerTokenMasterKeyOrBuilder() {
  return containerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
}
public static final int NM_TOKEN_MASTER_KEY_FIELD_NUMBER = 3;
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto nmTokenMasterKey_;
/**
 * <code>optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;</code>
 * @return Whether the nmTokenMasterKey field is set.
 */
@java.lang.Override
public boolean hasNmTokenMasterKey() {
  return ((bitField0_ & 0x00000004) != 0);
}
/**
 * <code>optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;</code>
 * @return The nmTokenMasterKey, or the default instance when unset.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getNmTokenMasterKey() {
  return nmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
}
/**
 * <code>optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;</code>
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getNmTokenMasterKeyOrBuilder() {
  return nmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
}
public static final int NODEACTION_FIELD_NUMBER = 4;
// Enum stored as its numeric wire value.
private int nodeAction_ = 0;
/**
 * <code>optional .hadoop.yarn.NodeActionProto nodeAction = 4;</code>
 * @return Whether the nodeAction field is set.
 */
@java.lang.Override public boolean hasNodeAction() {
  return ((bitField0_ & 0x00000008) != 0);
}
/**
 * <code>optional .hadoop.yarn.NodeActionProto nodeAction = 4;</code>
 * @return The nodeAction; unrecognized numeric values fall back to NORMAL.
 */
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto getNodeAction() {
  org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto result = org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.forNumber(nodeAction_);
  return result == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.NORMAL : result;
}
public static final int CONTAINERS_TO_CLEANUP_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private java.util.List containersToCleanup_;
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
@java.lang.Override
public java.util.List getContainersToCleanupList() {
return containersToCleanup_;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToCleanupOrBuilderList() {
return containersToCleanup_;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
@java.lang.Override
public int getContainersToCleanupCount() {
return containersToCleanup_.size();
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainersToCleanup(int index) {
return containersToCleanup_.get(index);
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainersToCleanupOrBuilder(
int index) {
return containersToCleanup_.get(index);
}
public static final int APPLICATIONS_TO_CLEANUP_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private java.util.List applicationsToCleanup_;
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
@java.lang.Override
public java.util.List getApplicationsToCleanupList() {
return applicationsToCleanup_;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationsToCleanupOrBuilderList() {
return applicationsToCleanup_;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
@java.lang.Override
public int getApplicationsToCleanupCount() {
return applicationsToCleanup_.size();
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationsToCleanup(int index) {
return applicationsToCleanup_.get(index);
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationsToCleanupOrBuilder(
int index) {
return applicationsToCleanup_.get(index);
}
public static final int NEXTHEARTBEATINTERVAL_FIELD_NUMBER = 7;
private long nextHeartBeatInterval_ = 0L;
/**
 * <code>optional int64 nextHeartBeatInterval = 7;</code>
 * @return Whether the nextHeartBeatInterval field is set.
 */
@java.lang.Override
public boolean hasNextHeartBeatInterval() {
  return ((bitField0_ & 0x00000010) != 0);
}
/**
 * <code>optional int64 nextHeartBeatInterval = 7;</code>
 * @return The nextHeartBeatInterval.
 */
@java.lang.Override
public long getNextHeartBeatInterval() {
  return nextHeartBeatInterval_;
}
public static final int DIAGNOSTICS_MESSAGE_FIELD_NUMBER = 8;
// Holds either a String or a ByteString; decoded lazily on first String read.
@SuppressWarnings("serial")
private volatile java.lang.Object diagnosticsMessage_ = "";
/**
 * <code>optional string diagnostics_message = 8;</code>
 * @return Whether the diagnosticsMessage field is set.
 */
@java.lang.Override
public boolean hasDiagnosticsMessage() {
  return ((bitField0_ & 0x00000020) != 0);
}
/**
 * <code>optional string diagnostics_message = 8;</code>
 * @return The diagnosticsMessage.
 */
@java.lang.Override
public java.lang.String getDiagnosticsMessage() {
  java.lang.Object ref = diagnosticsMessage_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    org.apache.hadoop.thirdparty.protobuf.ByteString bs =
        (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String only when the bytes are valid UTF-8.
    if (bs.isValidUtf8()) {
      diagnosticsMessage_ = s;
    }
    return s;
  }
}
/**
 * <code>optional string diagnostics_message = 8;</code>
 * @return The bytes for diagnosticsMessage.
 */
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
    getDiagnosticsMessageBytes() {
  java.lang.Object ref = diagnosticsMessage_;
  if (ref instanceof java.lang.String) {
    // Encode and cache the ByteString form on first byte-level access.
    org.apache.hadoop.thirdparty.protobuf.ByteString b =
        org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    diagnosticsMessage_ = b;
    return b;
  } else {
    return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
  }
}
public static final int CONTAINERS_TO_BE_REMOVED_FROM_NM_FIELD_NUMBER = 9;
@SuppressWarnings("serial")
private java.util.List containersToBeRemovedFromNm_;
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
@java.lang.Override
public java.util.List getContainersToBeRemovedFromNmList() {
return containersToBeRemovedFromNm_;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToBeRemovedFromNmOrBuilderList() {
return containersToBeRemovedFromNm_;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
@java.lang.Override
public int getContainersToBeRemovedFromNmCount() {
return containersToBeRemovedFromNm_.size();
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainersToBeRemovedFromNm(int index) {
return containersToBeRemovedFromNm_.get(index);
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainersToBeRemovedFromNmOrBuilder(
int index) {
return containersToBeRemovedFromNm_.get(index);
}
public static final int SYSTEM_CREDENTIALS_FOR_APPS_FIELD_NUMBER = 10;
@SuppressWarnings("serial")
private java.util.List systemCredentialsForApps_;
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
@java.lang.Override
public java.util.List getSystemCredentialsForAppsList() {
return systemCredentialsForApps_;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder>
getSystemCredentialsForAppsOrBuilderList() {
return systemCredentialsForApps_;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
@java.lang.Override
public int getSystemCredentialsForAppsCount() {
return systemCredentialsForApps_.size();
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto getSystemCredentialsForApps(int index) {
return systemCredentialsForApps_.get(index);
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder getSystemCredentialsForAppsOrBuilder(
int index) {
return systemCredentialsForApps_.get(index);
}
public static final int ARENODELABELSACCEPTEDBYRM_FIELD_NUMBER = 11;
private boolean areNodeLabelsAcceptedByRM_ = false;
/**
 * <code>optional bool areNodeLabelsAcceptedByRM = 11 [default = false];</code>
 * @return Whether the areNodeLabelsAcceptedByRM field is set.
 */
@java.lang.Override
public boolean hasAreNodeLabelsAcceptedByRM() {
  return ((bitField0_ & 0x00000040) != 0);
}
/**
 * <code>optional bool areNodeLabelsAcceptedByRM = 11 [default = false];</code>
 * @return The areNodeLabelsAcceptedByRM.
 */
@java.lang.Override
public boolean getAreNodeLabelsAcceptedByRM() {
  return areNodeLabelsAcceptedByRM_;
}
public static final int CONTAINERS_TO_DECREASE_FIELD_NUMBER = 12;
@SuppressWarnings("serial")
private java.util.List containersToDecrease_;
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
@java.lang.Override
public java.util.List getContainersToDecreaseList() {
return containersToDecrease_;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToDecreaseOrBuilderList() {
return containersToDecrease_;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
@java.lang.Override
public int getContainersToDecreaseCount() {
return containersToDecrease_.size();
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersToDecrease(int index) {
return containersToDecrease_.get(index);
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersToDecreaseOrBuilder(
int index) {
return containersToDecrease_.get(index);
}
public static final int CONTAINERS_TO_SIGNAL_FIELD_NUMBER = 13;
@SuppressWarnings("serial")
private java.util.List containersToSignal_;
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
@java.lang.Override
public java.util.List getContainersToSignalList() {
return containersToSignal_;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder>
getContainersToSignalOrBuilderList() {
return containersToSignal_;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
@java.lang.Override
public int getContainersToSignalCount() {
return containersToSignal_.size();
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getContainersToSignal(int index) {
return containersToSignal_.get(index);
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder getContainersToSignalOrBuilder(
int index) {
return containersToSignal_.get(index);
}
public static final int RESOURCE_FIELD_NUMBER = 14;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 * @return Whether the resource field is set.
 */
@java.lang.Override
public boolean hasResource() {
  return ((bitField0_ & 0x00000080) != 0);
}
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 * @return The resource, or the default instance when unset.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
  return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
  return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
public static final int CONTAINER_QUEUING_LIMIT_FIELD_NUMBER = 15;
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto containerQueuingLimit_;
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 * @return Whether the containerQueuingLimit field is set.
 */
@java.lang.Override
public boolean hasContainerQueuingLimit() {
  return ((bitField0_ & 0x00000100) != 0);
}
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 * @return The containerQueuingLimit, or the default instance when unset.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto getContainerQueuingLimit() {
  return containerQueuingLimit_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.getDefaultInstance() : containerQueuingLimit_;
}
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProtoOrBuilder getContainerQueuingLimitOrBuilder() {
  return containerQueuingLimit_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.getDefaultInstance() : containerQueuingLimit_;
}
public static final int APP_COLLECTORS_FIELD_NUMBER = 16;
@SuppressWarnings("serial")
private java.util.List appCollectors_;
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
@java.lang.Override
public java.util.List getAppCollectorsList() {
return appCollectors_;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsOrBuilderList() {
return appCollectors_;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
@java.lang.Override
public int getAppCollectorsCount() {
return appCollectors_.size();
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getAppCollectors(int index) {
return appCollectors_.get(index);
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getAppCollectorsOrBuilder(
int index) {
return appCollectors_.get(index);
}
public static final int CONTAINERS_TO_UPDATE_FIELD_NUMBER = 17;
@SuppressWarnings("serial")
private java.util.List containersToUpdate_;
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
@java.lang.Override
public java.util.List getContainersToUpdateList() {
return containersToUpdate_;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToUpdateOrBuilderList() {
return containersToUpdate_;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
@java.lang.Override
public int getContainersToUpdateCount() {
return containersToUpdate_.size();
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersToUpdate(int index) {
return containersToUpdate_.get(index);
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersToUpdateOrBuilder(
int index) {
return containersToUpdate_.get(index);
}
public static final int ARENODEATTRIBUTESACCEPTEDBYRM_FIELD_NUMBER = 18;
// optional bool field 18; presence tracked by has-bit 0x00000200.
private boolean areNodeAttributesAcceptedByRM_ = false;
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @return Whether the areNodeAttributesAcceptedByRM field is set.
*/
@java.lang.Override
public boolean hasAreNodeAttributesAcceptedByRM() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @return The areNodeAttributesAcceptedByRM.
*/
@java.lang.Override
public boolean getAreNodeAttributesAcceptedByRM() {
// Returns the declared default (false) when the field is unset.
return areNodeAttributesAcceptedByRM_;
}
public static final int TOKENSEQUENCENO_FIELD_NUMBER = 19;
// optional int64 field 19; presence tracked by has-bit 0x00000400.
private long tokenSequenceNo_ = 0L;
/**
* optional int64 tokenSequenceNo = 19;
* @return Whether the tokenSequenceNo field is set.
*/
@java.lang.Override
public boolean hasTokenSequenceNo() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* optional int64 tokenSequenceNo = 19;
* @return The tokenSequenceNo.
*/
@java.lang.Override
public long getTokenSequenceNo() {
// Returns 0L when the field is unset.
return tokenSequenceNo_;
}
// Tri-state cache for isInitialized(): -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Checks that every nested message which declares required fields is fully
// initialized; the result is memoized. Only the sub-messages whose types can
// carry required fields are visited here.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getContainersToDecreaseCount(); i++) {
if (!getContainersToDecrease(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getContainersToSignalCount(); i++) {
if (!getContainersToSignal(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasResource()) {
if (!getResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getAppCollectorsCount(); i++) {
if (!getAppCollectors(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getContainersToUpdateCount(); i++) {
if (!getContainersToUpdate(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes the message to the wire: each singular field is written only when
// its has-bit is set, repeated fields are written element-by-element, fields
// appear in ascending field-number order (1..19), and any unknown fields
// preserved from parsing are appended last.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(1, responseId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getContainerTokenMasterKey());
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getNmTokenMasterKey());
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeEnum(4, nodeAction_);
}
for (int i = 0; i < containersToCleanup_.size(); i++) {
output.writeMessage(5, containersToCleanup_.get(i));
}
for (int i = 0; i < applicationsToCleanup_.size(); i++) {
output.writeMessage(6, applicationsToCleanup_.get(i));
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeInt64(7, nextHeartBeatInterval_);
}
if (((bitField0_ & 0x00000020) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, diagnosticsMessage_);
}
for (int i = 0; i < containersToBeRemovedFromNm_.size(); i++) {
output.writeMessage(9, containersToBeRemovedFromNm_.get(i));
}
for (int i = 0; i < systemCredentialsForApps_.size(); i++) {
output.writeMessage(10, systemCredentialsForApps_.get(i));
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeBool(11, areNodeLabelsAcceptedByRM_);
}
for (int i = 0; i < containersToDecrease_.size(); i++) {
output.writeMessage(12, containersToDecrease_.get(i));
}
for (int i = 0; i < containersToSignal_.size(); i++) {
output.writeMessage(13, containersToSignal_.get(i));
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeMessage(14, getResource());
}
if (((bitField0_ & 0x00000100) != 0)) {
output.writeMessage(15, getContainerQueuingLimit());
}
for (int i = 0; i < appCollectors_.size(); i++) {
output.writeMessage(16, appCollectors_.get(i));
}
for (int i = 0; i < containersToUpdate_.size(); i++) {
output.writeMessage(17, containersToUpdate_.get(i));
}
if (((bitField0_ & 0x00000200) != 0)) {
output.writeBool(18, areNodeAttributesAcceptedByRM_);
}
if (((bitField0_ & 0x00000400) != 0)) {
output.writeInt64(19, tokenSequenceNo_);
}
getUnknownFields().writeTo(output);
}
// Computes the exact number of bytes writeTo() will emit, field for field,
// and caches the result in memoizedSize (-1 means "not yet computed").
// Must stay in lockstep with writeTo(): same fields, same guard conditions.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(1, responseId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getContainerTokenMasterKey());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(3, getNmTokenMasterKey());
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(4, nodeAction_);
}
for (int i = 0; i < containersToCleanup_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(5, containersToCleanup_.get(i));
}
for (int i = 0; i < applicationsToCleanup_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(6, applicationsToCleanup_.get(i));
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(7, nextHeartBeatInterval_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, diagnosticsMessage_);
}
for (int i = 0; i < containersToBeRemovedFromNm_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(9, containersToBeRemovedFromNm_.get(i));
}
for (int i = 0; i < systemCredentialsForApps_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(10, systemCredentialsForApps_.get(i));
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(11, areNodeLabelsAcceptedByRM_);
}
for (int i = 0; i < containersToDecrease_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(12, containersToDecrease_.get(i));
}
for (int i = 0; i < containersToSignal_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(13, containersToSignal_.get(i));
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(14, getResource());
}
if (((bitField0_ & 0x00000100) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(15, getContainerQueuingLimit());
}
for (int i = 0; i < appCollectors_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(16, appCollectors_.get(i));
}
for (int i = 0; i < containersToUpdate_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(17, containersToUpdate_.get(i));
}
if (((bitField0_ & 0x00000200) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(18, areNodeAttributesAcceptedByRM_);
}
if (((bitField0_ & 0x00000400) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(19, tokenSequenceNo_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality: for each optional field, presence must match and, when
// present, the values must be equal; repeated fields compare as whole lists;
// unknown fields must also be equal. Paired with hashCode() below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto) obj;
if (hasResponseId() != other.hasResponseId()) return false;
if (hasResponseId()) {
if (getResponseId()
!= other.getResponseId()) return false;
}
if (hasContainerTokenMasterKey() != other.hasContainerTokenMasterKey()) return false;
if (hasContainerTokenMasterKey()) {
if (!getContainerTokenMasterKey()
.equals(other.getContainerTokenMasterKey())) return false;
}
if (hasNmTokenMasterKey() != other.hasNmTokenMasterKey()) return false;
if (hasNmTokenMasterKey()) {
if (!getNmTokenMasterKey()
.equals(other.getNmTokenMasterKey())) return false;
}
if (hasNodeAction() != other.hasNodeAction()) return false;
if (hasNodeAction()) {
// Enum fields are stored as raw ints, so compare the numbers directly.
if (nodeAction_ != other.nodeAction_) return false;
}
if (!getContainersToCleanupList()
.equals(other.getContainersToCleanupList())) return false;
if (!getApplicationsToCleanupList()
.equals(other.getApplicationsToCleanupList())) return false;
if (hasNextHeartBeatInterval() != other.hasNextHeartBeatInterval()) return false;
if (hasNextHeartBeatInterval()) {
if (getNextHeartBeatInterval()
!= other.getNextHeartBeatInterval()) return false;
}
if (hasDiagnosticsMessage() != other.hasDiagnosticsMessage()) return false;
if (hasDiagnosticsMessage()) {
if (!getDiagnosticsMessage()
.equals(other.getDiagnosticsMessage())) return false;
}
if (!getContainersToBeRemovedFromNmList()
.equals(other.getContainersToBeRemovedFromNmList())) return false;
if (!getSystemCredentialsForAppsList()
.equals(other.getSystemCredentialsForAppsList())) return false;
if (hasAreNodeLabelsAcceptedByRM() != other.hasAreNodeLabelsAcceptedByRM()) return false;
if (hasAreNodeLabelsAcceptedByRM()) {
if (getAreNodeLabelsAcceptedByRM()
!= other.getAreNodeLabelsAcceptedByRM()) return false;
}
if (!getContainersToDecreaseList()
.equals(other.getContainersToDecreaseList())) return false;
if (!getContainersToSignalList()
.equals(other.getContainersToSignalList())) return false;
if (hasResource() != other.hasResource()) return false;
if (hasResource()) {
if (!getResource()
.equals(other.getResource())) return false;
}
if (hasContainerQueuingLimit() != other.hasContainerQueuingLimit()) return false;
if (hasContainerQueuingLimit()) {
if (!getContainerQueuingLimit()
.equals(other.getContainerQueuingLimit())) return false;
}
if (!getAppCollectorsList()
.equals(other.getAppCollectorsList())) return false;
if (!getContainersToUpdateList()
.equals(other.getContainersToUpdateList())) return false;
if (hasAreNodeAttributesAcceptedByRM() != other.hasAreNodeAttributesAcceptedByRM()) return false;
if (hasAreNodeAttributesAcceptedByRM()) {
if (getAreNodeAttributesAcceptedByRM()
!= other.getAreNodeAttributesAcceptedByRM()) return false;
}
if (hasTokenSequenceNo() != other.hasTokenSequenceNo()) return false;
if (hasTokenSequenceNo()) {
if (getTokenSequenceNo()
!= other.getTokenSequenceNo()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash consistent with equals(): only fields that are present (or non-empty,
// for repeated fields) contribute, each mixed in as field number then value
// with the 37/53 multipliers of protobuf generated code. The result is
// memoized (0 is used as the "not yet computed" sentinel).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasResponseId()) {
hash = (37 * hash) + RESPONSE_ID_FIELD_NUMBER;
hash = (53 * hash) + getResponseId();
}
if (hasContainerTokenMasterKey()) {
hash = (37 * hash) + CONTAINER_TOKEN_MASTER_KEY_FIELD_NUMBER;
hash = (53 * hash) + getContainerTokenMasterKey().hashCode();
}
if (hasNmTokenMasterKey()) {
hash = (37 * hash) + NM_TOKEN_MASTER_KEY_FIELD_NUMBER;
hash = (53 * hash) + getNmTokenMasterKey().hashCode();
}
if (hasNodeAction()) {
hash = (37 * hash) + NODEACTION_FIELD_NUMBER;
hash = (53 * hash) + nodeAction_;
}
if (getContainersToCleanupCount() > 0) {
hash = (37 * hash) + CONTAINERS_TO_CLEANUP_FIELD_NUMBER;
hash = (53 * hash) + getContainersToCleanupList().hashCode();
}
if (getApplicationsToCleanupCount() > 0) {
hash = (37 * hash) + APPLICATIONS_TO_CLEANUP_FIELD_NUMBER;
hash = (53 * hash) + getApplicationsToCleanupList().hashCode();
}
if (hasNextHeartBeatInterval()) {
hash = (37 * hash) + NEXTHEARTBEATINTERVAL_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getNextHeartBeatInterval());
}
if (hasDiagnosticsMessage()) {
hash = (37 * hash) + DIAGNOSTICS_MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getDiagnosticsMessage().hashCode();
}
if (getContainersToBeRemovedFromNmCount() > 0) {
hash = (37 * hash) + CONTAINERS_TO_BE_REMOVED_FROM_NM_FIELD_NUMBER;
hash = (53 * hash) + getContainersToBeRemovedFromNmList().hashCode();
}
if (getSystemCredentialsForAppsCount() > 0) {
hash = (37 * hash) + SYSTEM_CREDENTIALS_FOR_APPS_FIELD_NUMBER;
hash = (53 * hash) + getSystemCredentialsForAppsList().hashCode();
}
if (hasAreNodeLabelsAcceptedByRM()) {
hash = (37 * hash) + ARENODELABELSACCEPTEDBYRM_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getAreNodeLabelsAcceptedByRM());
}
if (getContainersToDecreaseCount() > 0) {
hash = (37 * hash) + CONTAINERS_TO_DECREASE_FIELD_NUMBER;
hash = (53 * hash) + getContainersToDecreaseList().hashCode();
}
if (getContainersToSignalCount() > 0) {
hash = (37 * hash) + CONTAINERS_TO_SIGNAL_FIELD_NUMBER;
hash = (53 * hash) + getContainersToSignalList().hashCode();
}
if (hasResource()) {
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
}
if (hasContainerQueuingLimit()) {
hash = (37 * hash) + CONTAINER_QUEUING_LIMIT_FIELD_NUMBER;
hash = (53 * hash) + getContainerQueuingLimit().hashCode();
}
if (getAppCollectorsCount() > 0) {
hash = (37 * hash) + APP_COLLECTORS_FIELD_NUMBER;
hash = (53 * hash) + getAppCollectorsList().hashCode();
}
if (getContainersToUpdateCount() > 0) {
hash = (37 * hash) + CONTAINERS_TO_UPDATE_FIELD_NUMBER;
hash = (53 * hash) + getContainersToUpdateList().hashCode();
}
if (hasAreNodeAttributesAcceptedByRM()) {
hash = (37 * hash) + ARENODEATTRIBUTESACCEPTEDBYRM_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getAreNodeAttributesAcceptedByRM());
}
if (hasTokenSequenceNo()) {
hash = (37 * hash) + TOKENSEQUENCENO_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getTokenSequenceNo());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The byte-oriented overloads delegate
// to the message's PARSER; the stream-oriented ones go through
// GeneratedMessageV3 helpers that convert protobuf exceptions to IOException.
// parseDelimitedFrom expects a varint length prefix before the message bytes.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories. newBuilder() starts from the default instance;
// newBuilder(prototype) pre-populates from an existing message; toBuilder()
// avoids a needless mergeFrom when called on the default instance itself.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodeHeartbeatResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeHeartbeatResponseProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProtoOrBuilder {
// Descriptor plumbing shared by all generated builders: exposes the message
// descriptor and the reflection accessor table for this message type.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.newBuilder()
// Builders are created either standalone or attached to a parent builder that
// is notified of changes (nested-builder support).
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When alwaysUseFieldBuilders is enabled, eagerly create the nested field
// builders for every message-typed and repeated field so later access never
// has to lazily initialize them.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getContainerTokenMasterKeyFieldBuilder();
getNmTokenMasterKeyFieldBuilder();
getContainersToCleanupFieldBuilder();
getApplicationsToCleanupFieldBuilder();
getContainersToBeRemovedFromNmFieldBuilder();
getSystemCredentialsForAppsFieldBuilder();
getContainersToDecreaseFieldBuilder();
getContainersToSignalFieldBuilder();
getResourceFieldBuilder();
getContainerQueuingLimitFieldBuilder();
getAppCollectorsFieldBuilder();
getContainersToUpdateFieldBuilder();
}
}
// Resets every field to its default value. Note the Builder's bitField0_ uses
// its own bit layout (one bit per field in declaration order, including
// repeated fields), which differs from the message's has-bit layout — see
// buildPartial0 for the translation between the two.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
responseId_ = 0;
containerTokenMasterKey_ = null;
if (containerTokenMasterKeyBuilder_ != null) {
containerTokenMasterKeyBuilder_.dispose();
containerTokenMasterKeyBuilder_ = null;
}
nmTokenMasterKey_ = null;
if (nmTokenMasterKeyBuilder_ != null) {
nmTokenMasterKeyBuilder_.dispose();
nmTokenMasterKeyBuilder_ = null;
}
nodeAction_ = 0;
// For each repeated field: without a nested builder, swap in the shared
// empty list; with one, clear the builder in place.
if (containersToCleanupBuilder_ == null) {
containersToCleanup_ = java.util.Collections.emptyList();
} else {
containersToCleanup_ = null;
containersToCleanupBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
if (applicationsToCleanupBuilder_ == null) {
applicationsToCleanup_ = java.util.Collections.emptyList();
} else {
applicationsToCleanup_ = null;
applicationsToCleanupBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
nextHeartBeatInterval_ = 0L;
diagnosticsMessage_ = "";
if (containersToBeRemovedFromNmBuilder_ == null) {
containersToBeRemovedFromNm_ = java.util.Collections.emptyList();
} else {
containersToBeRemovedFromNm_ = null;
containersToBeRemovedFromNmBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
if (systemCredentialsForAppsBuilder_ == null) {
systemCredentialsForApps_ = java.util.Collections.emptyList();
} else {
systemCredentialsForApps_ = null;
systemCredentialsForAppsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000200);
areNodeLabelsAcceptedByRM_ = false;
if (containersToDecreaseBuilder_ == null) {
containersToDecrease_ = java.util.Collections.emptyList();
} else {
containersToDecrease_ = null;
containersToDecreaseBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000800);
if (containersToSignalBuilder_ == null) {
containersToSignal_ = java.util.Collections.emptyList();
} else {
containersToSignal_ = null;
containersToSignalBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00001000);
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
containerQueuingLimit_ = null;
if (containerQueuingLimitBuilder_ != null) {
containerQueuingLimitBuilder_.dispose();
containerQueuingLimitBuilder_ = null;
}
if (appCollectorsBuilder_ == null) {
appCollectors_ = java.util.Collections.emptyList();
} else {
appCollectors_ = null;
appCollectorsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00008000);
if (containersToUpdateBuilder_ == null) {
containersToUpdate_ = java.util.Collections.emptyList();
} else {
containersToUpdate_ = null;
containersToUpdateBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00010000);
areNodeAttributesAcceptedByRM_ = false;
tokenSequenceNo_ = 0L;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NodeHeartbeatResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.getDefaultInstance();
}
// build() differs from buildPartial() only in that it rejects a result whose
// nested required fields are not all initialized.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a fresh message without checking initialization:
// repeated fields first, then singular fields guarded by bitField0_.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Transfers each repeated field into the result message. When the field has
// no nested builder, the builder's local list is frozen (wrapped
// unmodifiable, its "mutable" bit cleared) and shared with the message;
// otherwise the repeated-field builder assembles the final list.
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto result) {
if (containersToCleanupBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0)) {
containersToCleanup_ = java.util.Collections.unmodifiableList(containersToCleanup_);
bitField0_ = (bitField0_ & ~0x00000010);
}
result.containersToCleanup_ = containersToCleanup_;
} else {
result.containersToCleanup_ = containersToCleanupBuilder_.build();
}
if (applicationsToCleanupBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
applicationsToCleanup_ = java.util.Collections.unmodifiableList(applicationsToCleanup_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.applicationsToCleanup_ = applicationsToCleanup_;
} else {
result.applicationsToCleanup_ = applicationsToCleanupBuilder_.build();
}
if (containersToBeRemovedFromNmBuilder_ == null) {
if (((bitField0_ & 0x00000100) != 0)) {
containersToBeRemovedFromNm_ = java.util.Collections.unmodifiableList(containersToBeRemovedFromNm_);
bitField0_ = (bitField0_ & ~0x00000100);
}
result.containersToBeRemovedFromNm_ = containersToBeRemovedFromNm_;
} else {
result.containersToBeRemovedFromNm_ = containersToBeRemovedFromNmBuilder_.build();
}
if (systemCredentialsForAppsBuilder_ == null) {
if (((bitField0_ & 0x00000200) != 0)) {
systemCredentialsForApps_ = java.util.Collections.unmodifiableList(systemCredentialsForApps_);
bitField0_ = (bitField0_ & ~0x00000200);
}
result.systemCredentialsForApps_ = systemCredentialsForApps_;
} else {
result.systemCredentialsForApps_ = systemCredentialsForAppsBuilder_.build();
}
if (containersToDecreaseBuilder_ == null) {
if (((bitField0_ & 0x00000800) != 0)) {
containersToDecrease_ = java.util.Collections.unmodifiableList(containersToDecrease_);
bitField0_ = (bitField0_ & ~0x00000800);
}
result.containersToDecrease_ = containersToDecrease_;
} else {
result.containersToDecrease_ = containersToDecreaseBuilder_.build();
}
if (containersToSignalBuilder_ == null) {
if (((bitField0_ & 0x00001000) != 0)) {
containersToSignal_ = java.util.Collections.unmodifiableList(containersToSignal_);
bitField0_ = (bitField0_ & ~0x00001000);
}
result.containersToSignal_ = containersToSignal_;
} else {
result.containersToSignal_ = containersToSignalBuilder_.build();
}
if (appCollectorsBuilder_ == null) {
if (((bitField0_ & 0x00008000) != 0)) {
appCollectors_ = java.util.Collections.unmodifiableList(appCollectors_);
bitField0_ = (bitField0_ & ~0x00008000);
}
result.appCollectors_ = appCollectors_;
} else {
result.appCollectors_ = appCollectorsBuilder_.build();
}
if (containersToUpdateBuilder_ == null) {
if (((bitField0_ & 0x00010000) != 0)) {
containersToUpdate_ = java.util.Collections.unmodifiableList(containersToUpdate_);
bitField0_ = (bitField0_ & ~0x00010000);
}
result.containersToUpdate_ = containersToUpdate_;
} else {
result.containersToUpdate_ = containersToUpdateBuilder_.build();
}
}
// Transfers singular fields and translates builder bits (from_bitField0_,
// one bit per declared field including repeated ones) into message has-bits
// (to_bitField0_, one bit per singular field only) — hence the differing
// masks on each side of the mapping.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.responseId_ = responseId_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.containerTokenMasterKey_ = containerTokenMasterKeyBuilder_ == null
? containerTokenMasterKey_
: containerTokenMasterKeyBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.nmTokenMasterKey_ = nmTokenMasterKeyBuilder_ == null
? nmTokenMasterKey_
: nmTokenMasterKeyBuilder_.build();
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.nodeAction_ = nodeAction_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.nextHeartBeatInterval_ = nextHeartBeatInterval_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.diagnosticsMessage_ = diagnosticsMessage_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000400) != 0)) {
result.areNodeLabelsAcceptedByRM_ = areNodeLabelsAcceptedByRM_;
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00002000) != 0)) {
result.resource_ = resourceBuilder_ == null
? resource_
: resourceBuilder_.build();
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00004000) != 0)) {
result.containerQueuingLimit_ = containerQueuingLimitBuilder_ == null
? containerQueuingLimit_
: containerQueuingLimitBuilder_.build();
to_bitField0_ |= 0x00000100;
}
if (((from_bitField0_ & 0x00020000) != 0)) {
result.areNodeAttributesAcceptedByRM_ = areNodeAttributesAcceptedByRM_;
to_bitField0_ |= 0x00000200;
}
if (((from_bitField0_ & 0x00040000) != 0)) {
result.tokenSequenceNo_ = tokenSequenceNo_;
to_bitField0_ |= 0x00000400;
}
// Merge (not overwrite) so has-bits already set on the result survive.
result.bitField0_ |= to_bitField0_;
}
// Standard GeneratedMessageV3.Builder plumbing: each override simply
// delegates to the superclass implementation.  They are overridden only so
// the return type is this concrete Builder, keeping call chains fluent.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
    int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
/**
 * Generic merge entry point: dispatches to the type-specific
 * mergeFrom(NodeHeartbeatResponseProto) when possible, otherwise falls back
 * to the reflective superclass merge.
 */
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto) {
    return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
/**
 * Merges {@code other} into this builder.  Singular fields that are set on
 * {@code other} overwrite (or, for sub-messages, merge into) this builder's
 * values; repeated fields from {@code other} are appended to this builder's
 * lists; unknown fields are merged last.  Merging the default instance is a
 * no-op.
 */
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto other) {
  if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.getDefaultInstance()) return this;
  if (other.hasResponseId()) {
    setResponseId(other.getResponseId());
  }
  if (other.hasContainerTokenMasterKey()) {
    mergeContainerTokenMasterKey(other.getContainerTokenMasterKey());
  }
  if (other.hasNmTokenMasterKey()) {
    mergeNmTokenMasterKey(other.getNmTokenMasterKey());
  }
  if (other.hasNodeAction()) {
    setNodeAction(other.getNodeAction());
  }
  // Each repeated field below follows the standard generated pattern:
  // when no sub-builder exists, either adopt other's (immutable) list
  // directly if ours is empty, or copy-append; when a sub-builder exists,
  // either rebuild it around other's list or append via the builder.
  if (containersToCleanupBuilder_ == null) {
    if (!other.containersToCleanup_.isEmpty()) {
      if (containersToCleanup_.isEmpty()) {
        containersToCleanup_ = other.containersToCleanup_;
        bitField0_ = (bitField0_ & ~0x00000010);
      } else {
        ensureContainersToCleanupIsMutable();
        containersToCleanup_.addAll(other.containersToCleanup_);
      }
      onChanged();
    }
  } else {
    if (!other.containersToCleanup_.isEmpty()) {
      if (containersToCleanupBuilder_.isEmpty()) {
        containersToCleanupBuilder_.dispose();
        containersToCleanupBuilder_ = null;
        containersToCleanup_ = other.containersToCleanup_;
        bitField0_ = (bitField0_ & ~0x00000010);
        containersToCleanupBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getContainersToCleanupFieldBuilder() : null;
      } else {
        containersToCleanupBuilder_.addAllMessages(other.containersToCleanup_);
      }
    }
  }
  if (applicationsToCleanupBuilder_ == null) {
    if (!other.applicationsToCleanup_.isEmpty()) {
      if (applicationsToCleanup_.isEmpty()) {
        applicationsToCleanup_ = other.applicationsToCleanup_;
        bitField0_ = (bitField0_ & ~0x00000020);
      } else {
        ensureApplicationsToCleanupIsMutable();
        applicationsToCleanup_.addAll(other.applicationsToCleanup_);
      }
      onChanged();
    }
  } else {
    if (!other.applicationsToCleanup_.isEmpty()) {
      if (applicationsToCleanupBuilder_.isEmpty()) {
        applicationsToCleanupBuilder_.dispose();
        applicationsToCleanupBuilder_ = null;
        applicationsToCleanup_ = other.applicationsToCleanup_;
        bitField0_ = (bitField0_ & ~0x00000020);
        applicationsToCleanupBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getApplicationsToCleanupFieldBuilder() : null;
      } else {
        applicationsToCleanupBuilder_.addAllMessages(other.applicationsToCleanup_);
      }
    }
  }
  if (other.hasNextHeartBeatInterval()) {
    setNextHeartBeatInterval(other.getNextHeartBeatInterval());
  }
  if (other.hasDiagnosticsMessage()) {
    // Strings copy the backing object directly instead of going through the
    // setter, preserving the lazily-decoded ByteString/String form.
    diagnosticsMessage_ = other.diagnosticsMessage_;
    bitField0_ |= 0x00000080;
    onChanged();
  }
  if (containersToBeRemovedFromNmBuilder_ == null) {
    if (!other.containersToBeRemovedFromNm_.isEmpty()) {
      if (containersToBeRemovedFromNm_.isEmpty()) {
        containersToBeRemovedFromNm_ = other.containersToBeRemovedFromNm_;
        bitField0_ = (bitField0_ & ~0x00000100);
      } else {
        ensureContainersToBeRemovedFromNmIsMutable();
        containersToBeRemovedFromNm_.addAll(other.containersToBeRemovedFromNm_);
      }
      onChanged();
    }
  } else {
    if (!other.containersToBeRemovedFromNm_.isEmpty()) {
      if (containersToBeRemovedFromNmBuilder_.isEmpty()) {
        containersToBeRemovedFromNmBuilder_.dispose();
        containersToBeRemovedFromNmBuilder_ = null;
        containersToBeRemovedFromNm_ = other.containersToBeRemovedFromNm_;
        bitField0_ = (bitField0_ & ~0x00000100);
        containersToBeRemovedFromNmBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getContainersToBeRemovedFromNmFieldBuilder() : null;
      } else {
        containersToBeRemovedFromNmBuilder_.addAllMessages(other.containersToBeRemovedFromNm_);
      }
    }
  }
  if (systemCredentialsForAppsBuilder_ == null) {
    if (!other.systemCredentialsForApps_.isEmpty()) {
      if (systemCredentialsForApps_.isEmpty()) {
        systemCredentialsForApps_ = other.systemCredentialsForApps_;
        bitField0_ = (bitField0_ & ~0x00000200);
      } else {
        ensureSystemCredentialsForAppsIsMutable();
        systemCredentialsForApps_.addAll(other.systemCredentialsForApps_);
      }
      onChanged();
    }
  } else {
    if (!other.systemCredentialsForApps_.isEmpty()) {
      if (systemCredentialsForAppsBuilder_.isEmpty()) {
        systemCredentialsForAppsBuilder_.dispose();
        systemCredentialsForAppsBuilder_ = null;
        systemCredentialsForApps_ = other.systemCredentialsForApps_;
        bitField0_ = (bitField0_ & ~0x00000200);
        systemCredentialsForAppsBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getSystemCredentialsForAppsFieldBuilder() : null;
      } else {
        systemCredentialsForAppsBuilder_.addAllMessages(other.systemCredentialsForApps_);
      }
    }
  }
  if (other.hasAreNodeLabelsAcceptedByRM()) {
    setAreNodeLabelsAcceptedByRM(other.getAreNodeLabelsAcceptedByRM());
  }
  if (containersToDecreaseBuilder_ == null) {
    if (!other.containersToDecrease_.isEmpty()) {
      if (containersToDecrease_.isEmpty()) {
        containersToDecrease_ = other.containersToDecrease_;
        bitField0_ = (bitField0_ & ~0x00000800);
      } else {
        ensureContainersToDecreaseIsMutable();
        containersToDecrease_.addAll(other.containersToDecrease_);
      }
      onChanged();
    }
  } else {
    if (!other.containersToDecrease_.isEmpty()) {
      if (containersToDecreaseBuilder_.isEmpty()) {
        containersToDecreaseBuilder_.dispose();
        containersToDecreaseBuilder_ = null;
        containersToDecrease_ = other.containersToDecrease_;
        bitField0_ = (bitField0_ & ~0x00000800);
        containersToDecreaseBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getContainersToDecreaseFieldBuilder() : null;
      } else {
        containersToDecreaseBuilder_.addAllMessages(other.containersToDecrease_);
      }
    }
  }
  if (containersToSignalBuilder_ == null) {
    if (!other.containersToSignal_.isEmpty()) {
      if (containersToSignal_.isEmpty()) {
        containersToSignal_ = other.containersToSignal_;
        bitField0_ = (bitField0_ & ~0x00001000);
      } else {
        ensureContainersToSignalIsMutable();
        containersToSignal_.addAll(other.containersToSignal_);
      }
      onChanged();
    }
  } else {
    if (!other.containersToSignal_.isEmpty()) {
      if (containersToSignalBuilder_.isEmpty()) {
        containersToSignalBuilder_.dispose();
        containersToSignalBuilder_ = null;
        containersToSignal_ = other.containersToSignal_;
        bitField0_ = (bitField0_ & ~0x00001000);
        containersToSignalBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getContainersToSignalFieldBuilder() : null;
      } else {
        containersToSignalBuilder_.addAllMessages(other.containersToSignal_);
      }
    }
  }
  if (other.hasResource()) {
    mergeResource(other.getResource());
  }
  if (other.hasContainerQueuingLimit()) {
    mergeContainerQueuingLimit(other.getContainerQueuingLimit());
  }
  if (appCollectorsBuilder_ == null) {
    if (!other.appCollectors_.isEmpty()) {
      if (appCollectors_.isEmpty()) {
        appCollectors_ = other.appCollectors_;
        bitField0_ = (bitField0_ & ~0x00008000);
      } else {
        ensureAppCollectorsIsMutable();
        appCollectors_.addAll(other.appCollectors_);
      }
      onChanged();
    }
  } else {
    if (!other.appCollectors_.isEmpty()) {
      if (appCollectorsBuilder_.isEmpty()) {
        appCollectorsBuilder_.dispose();
        appCollectorsBuilder_ = null;
        appCollectors_ = other.appCollectors_;
        bitField0_ = (bitField0_ & ~0x00008000);
        appCollectorsBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getAppCollectorsFieldBuilder() : null;
      } else {
        appCollectorsBuilder_.addAllMessages(other.appCollectors_);
      }
    }
  }
  if (containersToUpdateBuilder_ == null) {
    if (!other.containersToUpdate_.isEmpty()) {
      if (containersToUpdate_.isEmpty()) {
        containersToUpdate_ = other.containersToUpdate_;
        bitField0_ = (bitField0_ & ~0x00010000);
      } else {
        ensureContainersToUpdateIsMutable();
        containersToUpdate_.addAll(other.containersToUpdate_);
      }
      onChanged();
    }
  } else {
    if (!other.containersToUpdate_.isEmpty()) {
      if (containersToUpdateBuilder_.isEmpty()) {
        containersToUpdateBuilder_.dispose();
        containersToUpdateBuilder_ = null;
        containersToUpdate_ = other.containersToUpdate_;
        bitField0_ = (bitField0_ & ~0x00010000);
        containersToUpdateBuilder_ =
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
             getContainersToUpdateFieldBuilder() : null;
      } else {
        containersToUpdateBuilder_.addAllMessages(other.containersToUpdate_);
      }
    }
  }
  if (other.hasAreNodeAttributesAcceptedByRM()) {
    setAreNodeAttributesAcceptedByRM(other.getAreNodeAttributesAcceptedByRM());
  }
  if (other.hasTokenSequenceNo()) {
    setTokenSequenceNo(other.getTokenSequenceNo());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
/**
 * Returns false if any element of containers_to_decrease,
 * containers_to_signal, app_collectors, containers_to_update, or a set
 * resource sub-message is not fully initialized; otherwise true.
 * NOTE(review): the generator emits checks only for sub-message types that
 * can be uninitialized — presumably the remaining fields have no required
 * members.
 */
@java.lang.Override
public final boolean isInitialized() {
  for (int i = 0; i < getContainersToDecreaseCount(); i++) {
    if (!getContainersToDecrease(i).isInitialized()) {
      return false;
    }
  }
  for (int i = 0; i < getContainersToSignalCount(); i++) {
    if (!getContainersToSignal(i).isInitialized()) {
      return false;
    }
  }
  if (hasResource()) {
    if (!getResource().isInitialized()) {
      return false;
    }
  }
  for (int i = 0; i < getAppCollectorsCount(); i++) {
    if (!getAppCollectors(i).isInitialized()) {
      return false;
    }
  }
  for (int i = 0; i < getContainersToUpdateCount(); i++) {
    if (!getContainersToUpdate(i).isInitialized()) {
      return false;
    }
  }
  return true;
}
/**
 * Parses serialized fields from {@code input} directly into this builder,
 * one tag at a time, until end of stream (tag 0) or an end-group tag.
 * Unrecognized tags and out-of-range enum values are preserved in the
 * unknown-field set.  On a malformed message the InvalidProtocolBufferException
 * is unwrapped to its underlying IOException; onChanged() always fires.
 */
@java.lang.Override
public Builder mergeFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // Tag = (field_number << 3) | wire_type; e.g. 8 = field 1 varint,
      // 18 = field 2 length-delimited.
      switch (tag) {
        case 0:
          done = true;
          break;
        case 8: {
          responseId_ = input.readInt32();
          bitField0_ |= 0x00000001;
          break;
        } // case 8
        case 18: {
          input.readMessage(
              getContainerTokenMasterKeyFieldBuilder().getBuilder(),
              extensionRegistry);
          bitField0_ |= 0x00000002;
          break;
        } // case 18
        case 26: {
          input.readMessage(
              getNmTokenMasterKeyFieldBuilder().getBuilder(),
              extensionRegistry);
          bitField0_ |= 0x00000004;
          break;
        } // case 26
        case 32: {
          int tmpRaw = input.readEnum();
          org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto tmpValue =
              org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.forNumber(tmpRaw);
          if (tmpValue == null) {
            // Unknown enum number: keep it as an unknown varint field.
            mergeUnknownVarintField(4, tmpRaw);
          } else {
            nodeAction_ = tmpRaw;
            bitField0_ |= 0x00000008;
          }
          break;
        } // case 32
        case 42: {
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                  extensionRegistry);
          if (containersToCleanupBuilder_ == null) {
            ensureContainersToCleanupIsMutable();
            containersToCleanup_.add(m);
          } else {
            containersToCleanupBuilder_.addMessage(m);
          }
          break;
        } // case 42
        case 50: {
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER,
                  extensionRegistry);
          if (applicationsToCleanupBuilder_ == null) {
            ensureApplicationsToCleanupIsMutable();
            applicationsToCleanup_.add(m);
          } else {
            applicationsToCleanupBuilder_.addMessage(m);
          }
          break;
        } // case 50
        case 56: {
          nextHeartBeatInterval_ = input.readInt64();
          bitField0_ |= 0x00000040;
          break;
        } // case 56
        case 66: {
          diagnosticsMessage_ = input.readBytes();
          bitField0_ |= 0x00000080;
          break;
        } // case 66
        case 74: {
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER,
                  extensionRegistry);
          if (containersToBeRemovedFromNmBuilder_ == null) {
            ensureContainersToBeRemovedFromNmIsMutable();
            containersToBeRemovedFromNm_.add(m);
          } else {
            containersToBeRemovedFromNmBuilder_.addMessage(m);
          }
          break;
        } // case 74
        case 82: {
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.PARSER,
                  extensionRegistry);
          if (systemCredentialsForAppsBuilder_ == null) {
            ensureSystemCredentialsForAppsIsMutable();
            systemCredentialsForApps_.add(m);
          } else {
            systemCredentialsForAppsBuilder_.addMessage(m);
          }
          break;
        } // case 82
        case 88: {
          areNodeLabelsAcceptedByRM_ = input.readBool();
          bitField0_ |= 0x00000400;
          break;
        } // case 88
        case 98: {
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER,
                  extensionRegistry);
          if (containersToDecreaseBuilder_ == null) {
            ensureContainersToDecreaseIsMutable();
            containersToDecrease_.add(m);
          } else {
            containersToDecreaseBuilder_.addMessage(m);
          }
          break;
        } // case 98
        case 106: {
          org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.PARSER,
                  extensionRegistry);
          if (containersToSignalBuilder_ == null) {
            ensureContainersToSignalIsMutable();
            containersToSignal_.add(m);
          } else {
            containersToSignalBuilder_.addMessage(m);
          }
          break;
        } // case 106
        case 114: {
          input.readMessage(
              getResourceFieldBuilder().getBuilder(),
              extensionRegistry);
          bitField0_ |= 0x00002000;
          break;
        } // case 114
        case 122: {
          input.readMessage(
              getContainerQueuingLimitFieldBuilder().getBuilder(),
              extensionRegistry);
          bitField0_ |= 0x00004000;
          break;
        } // case 122
        case 130: {
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.PARSER,
                  extensionRegistry);
          if (appCollectorsBuilder_ == null) {
            ensureAppCollectorsIsMutable();
            appCollectors_.add(m);
          } else {
            appCollectorsBuilder_.addMessage(m);
          }
          break;
        } // case 130
        case 138: {
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto m =
              input.readMessage(
                  org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER,
                  extensionRegistry);
          if (containersToUpdateBuilder_ == null) {
            ensureContainersToUpdateIsMutable();
            containersToUpdate_.add(m);
          } else {
            containersToUpdateBuilder_.addMessage(m);
          }
          break;
        } // case 138
        case 144: {
          areNodeAttributesAcceptedByRM_ = input.readBool();
          bitField0_ |= 0x00020000;
          break;
        } // case 144
        case 152: {
          tokenSequenceNo_ = input.readInt64();
          bitField0_ |= 0x00040000;
          break;
        } // case 152
        default: {
          if (!super.parseUnknownField(input, extensionRegistry, tag)) {
            done = true; // was an endgroup tag
          }
          break;
        } // default:
      } // switch (tag)
    } // while (!done)
  } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Presence bitmask for this builder's optional fields; one bit per field,
// assigned in declaration order (0x1 = response_id, 0x2 = container_token_-
// master_key, ...).
private int bitField0_;
// optional int32 response_id = 1
private int responseId_ ;
/**
 * optional int32 response_id = 1;
 * @return Whether the responseId field is set.
 */
@java.lang.Override
public boolean hasResponseId() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional int32 response_id = 1;
 * @return The responseId.
 */
@java.lang.Override
public int getResponseId() {
  return responseId_;
}
/**
 * optional int32 response_id = 1;
 * @param value The responseId to set.
 * @return This builder for chaining.
 */
public Builder setResponseId(int value) {
  responseId_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * optional int32 response_id = 1;
 * Clears the presence bit and resets the value to the int32 default (0).
 * @return This builder for chaining.
 */
public Builder clearResponseId() {
  bitField0_ = (bitField0_ & ~0x00000001);
  responseId_ = 0;
  onChanged();
  return this;
}
// Singular sub-message field: exactly one of the plain message field or the
// lazily-created SingleFieldBuilderV3 holds the current value at any time.
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto containerTokenMasterKey_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder> containerTokenMasterKeyBuilder_;
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 * @return Whether the containerTokenMasterKey field is set.
 */
public boolean hasContainerTokenMasterKey() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 * @return The containerTokenMasterKey, or the default instance if unset.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getContainerTokenMasterKey() {
  if (containerTokenMasterKeyBuilder_ == null) {
    return containerTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
  } else {
    return containerTokenMasterKeyBuilder_.getMessage();
  }
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 * Replaces the field with {@code value}; rejects null.
 */
public Builder setContainerTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
  if (containerTokenMasterKeyBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    containerTokenMasterKey_ = value;
  } else {
    containerTokenMasterKeyBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 */
public Builder setContainerTokenMasterKey(
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder builderForValue) {
  if (containerTokenMasterKeyBuilder_ == null) {
    containerTokenMasterKey_ = builderForValue.build();
  } else {
    containerTokenMasterKeyBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 * Field-merges {@code value} into the existing value when one is already
 * set; otherwise behaves like a plain set.
 */
public Builder mergeContainerTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
  if (containerTokenMasterKeyBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0) &&
      containerTokenMasterKey_ != null &&
      containerTokenMasterKey_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance()) {
      getContainerTokenMasterKeyBuilder().mergeFrom(value);
    } else {
      containerTokenMasterKey_ = value;
    }
  } else {
    containerTokenMasterKeyBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 */
public Builder clearContainerTokenMasterKey() {
  bitField0_ = (bitField0_ & ~0x00000002);
  containerTokenMasterKey_ = null;
  if (containerTokenMasterKeyBuilder_ != null) {
    containerTokenMasterKeyBuilder_.dispose();
    containerTokenMasterKeyBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 * Marks the field present and exposes a mutable sub-builder for it.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder getContainerTokenMasterKeyBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getContainerTokenMasterKeyFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getContainerTokenMasterKeyOrBuilder() {
  if (containerTokenMasterKeyBuilder_ != null) {
    return containerTokenMasterKeyBuilder_.getMessageOrBuilder();
  } else {
    return containerTokenMasterKey_ == null ?
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : containerTokenMasterKey_;
  }
}
/**
 * optional .hadoop.yarn.MasterKeyProto container_token_master_key = 2;
 * Lazily creates the sub-builder; once created, the plain field is nulled
 * and the builder becomes the single source of truth.
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>
    getContainerTokenMasterKeyFieldBuilder() {
  if (containerTokenMasterKeyBuilder_ == null) {
    containerTokenMasterKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>(
            getContainerTokenMasterKey(),
            getParentForChildren(),
            isClean());
    containerTokenMasterKey_ = null;
  }
  return containerTokenMasterKeyBuilder_;
}
// Singular sub-message field (same plain-field-or-sub-builder pattern as
// container_token_master_key; presence bit 0x00000004).
private org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto nmTokenMasterKey_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder> nmTokenMasterKeyBuilder_;
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 * @return Whether the nmTokenMasterKey field is set.
 */
public boolean hasNmTokenMasterKey() {
  return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 * @return The nmTokenMasterKey, or the default instance if unset.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto getNmTokenMasterKey() {
  if (nmTokenMasterKeyBuilder_ == null) {
    return nmTokenMasterKey_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
  } else {
    return nmTokenMasterKeyBuilder_.getMessage();
  }
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 * Replaces the field with {@code value}; rejects null.
 */
public Builder setNmTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
  if (nmTokenMasterKeyBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    nmTokenMasterKey_ = value;
  } else {
    nmTokenMasterKeyBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 */
public Builder setNmTokenMasterKey(
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder builderForValue) {
  if (nmTokenMasterKeyBuilder_ == null) {
    nmTokenMasterKey_ = builderForValue.build();
  } else {
    nmTokenMasterKeyBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 * Field-merges {@code value} into the existing value when one is already
 * set; otherwise behaves like a plain set.
 */
public Builder mergeNmTokenMasterKey(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto value) {
  if (nmTokenMasterKeyBuilder_ == null) {
    if (((bitField0_ & 0x00000004) != 0) &&
      nmTokenMasterKey_ != null &&
      nmTokenMasterKey_ != org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance()) {
      getNmTokenMasterKeyBuilder().mergeFrom(value);
    } else {
      nmTokenMasterKey_ = value;
    }
  } else {
    nmTokenMasterKeyBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 */
public Builder clearNmTokenMasterKey() {
  bitField0_ = (bitField0_ & ~0x00000004);
  nmTokenMasterKey_ = null;
  if (nmTokenMasterKeyBuilder_ != null) {
    nmTokenMasterKeyBuilder_.dispose();
    nmTokenMasterKeyBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 * Marks the field present and exposes a mutable sub-builder for it.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder getNmTokenMasterKeyBuilder() {
  bitField0_ |= 0x00000004;
  onChanged();
  return getNmTokenMasterKeyFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder getNmTokenMasterKeyOrBuilder() {
  if (nmTokenMasterKeyBuilder_ != null) {
    return nmTokenMasterKeyBuilder_.getMessageOrBuilder();
  } else {
    return nmTokenMasterKey_ == null ?
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.getDefaultInstance() : nmTokenMasterKey_;
  }
}
/**
 * optional .hadoop.yarn.MasterKeyProto nm_token_master_key = 3;
 * Lazily creates the sub-builder; once created, the plain field is nulled
 * and the builder becomes the single source of truth.
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>
    getNmTokenMasterKeyFieldBuilder() {
  if (nmTokenMasterKeyBuilder_ == null) {
    nmTokenMasterKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProtoOrBuilder>(
            getNmTokenMasterKey(),
            getParentForChildren(),
            isClean());
    nmTokenMasterKey_ = null;
  }
  return nmTokenMasterKeyBuilder_;
}
// Enum field stored as its raw wire number; 0 corresponds to the first
// NodeActionProto value (NORMAL, per getNodeAction's fallback).
private int nodeAction_ = 0;
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 4;
 * @return Whether the nodeAction field is set.
 */
@java.lang.Override public boolean hasNodeAction() {
  return ((bitField0_ & 0x00000008) != 0);
}
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 4;
 * @return The nodeAction; falls back to NORMAL if the stored number maps
 *         to no known enum value.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto getNodeAction() {
  org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto result = org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.forNumber(nodeAction_);
  return result == null ? org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto.NORMAL : result;
}
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 4;
 * @param value The nodeAction to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setNodeAction(org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
  nodeAction_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * optional .hadoop.yarn.NodeActionProto nodeAction = 4;
 * @return This builder for chaining.
 */
public Builder clearNodeAction() {
  bitField0_ = (bitField0_ & ~0x00000008);
  nodeAction_ = 0;
  onChanged();
  return this;
}
private java.util.List containersToCleanup_ =
java.util.Collections.emptyList();
private void ensureContainersToCleanupIsMutable() {
if (!((bitField0_ & 0x00000010) != 0)) {
containersToCleanup_ = new java.util.ArrayList(containersToCleanup_);
bitField0_ |= 0x00000010;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containersToCleanupBuilder_;
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public java.util.List getContainersToCleanupList() {
if (containersToCleanupBuilder_ == null) {
return java.util.Collections.unmodifiableList(containersToCleanup_);
} else {
return containersToCleanupBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public int getContainersToCleanupCount() {
if (containersToCleanupBuilder_ == null) {
return containersToCleanup_.size();
} else {
return containersToCleanupBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainersToCleanup(int index) {
if (containersToCleanupBuilder_ == null) {
return containersToCleanup_.get(index);
} else {
return containersToCleanupBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder setContainersToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containersToCleanupBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToCleanupIsMutable();
containersToCleanup_.set(index, value);
onChanged();
} else {
containersToCleanupBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder setContainersToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containersToCleanupBuilder_ == null) {
ensureContainersToCleanupIsMutable();
containersToCleanup_.set(index, builderForValue.build());
onChanged();
} else {
containersToCleanupBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder addContainersToCleanup(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containersToCleanupBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToCleanupIsMutable();
containersToCleanup_.add(value);
onChanged();
} else {
containersToCleanupBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder addContainersToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containersToCleanupBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToCleanupIsMutable();
containersToCleanup_.add(index, value);
onChanged();
} else {
containersToCleanupBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder addContainersToCleanup(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containersToCleanupBuilder_ == null) {
ensureContainersToCleanupIsMutable();
containersToCleanup_.add(builderForValue.build());
onChanged();
} else {
containersToCleanupBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder addContainersToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containersToCleanupBuilder_ == null) {
ensureContainersToCleanupIsMutable();
containersToCleanup_.add(index, builderForValue.build());
onChanged();
} else {
containersToCleanupBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder addAllContainersToCleanup(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
if (containersToCleanupBuilder_ == null) {
ensureContainersToCleanupIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, containersToCleanup_);
onChanged();
} else {
containersToCleanupBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder clearContainersToCleanup() {
if (containersToCleanupBuilder_ == null) {
containersToCleanup_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
} else {
containersToCleanupBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public Builder removeContainersToCleanup(int index) {
if (containersToCleanupBuilder_ == null) {
ensureContainersToCleanupIsMutable();
containersToCleanup_.remove(index);
onChanged();
} else {
containersToCleanupBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainersToCleanupBuilder(
int index) {
return getContainersToCleanupFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainersToCleanupOrBuilder(
int index) {
if (containersToCleanupBuilder_ == null) {
return containersToCleanup_.get(index); } else {
return containersToCleanupBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToCleanupOrBuilderList() {
if (containersToCleanupBuilder_ != null) {
return containersToCleanupBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(containersToCleanup_);
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainersToCleanupBuilder() {
return getContainersToCleanupFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainersToCleanupBuilder(
int index) {
return getContainersToCleanupFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_cleanup = 5;
*/
public java.util.List
getContainersToCleanupBuilderList() {
return getContainersToCleanupFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToCleanupFieldBuilder() {
if (containersToCleanupBuilder_ == null) {
containersToCleanupBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
containersToCleanup_,
((bitField0_ & 0x00000010) != 0),
getParentForChildren(),
isClean());
containersToCleanup_ = null;
}
return containersToCleanupBuilder_;
}
private java.util.List applicationsToCleanup_ =
java.util.Collections.emptyList();
private void ensureApplicationsToCleanupIsMutable() {
if (!((bitField0_ & 0x00000020) != 0)) {
applicationsToCleanup_ = new java.util.ArrayList(applicationsToCleanup_);
bitField0_ |= 0x00000020;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationsToCleanupBuilder_;
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public java.util.List getApplicationsToCleanupList() {
if (applicationsToCleanupBuilder_ == null) {
return java.util.Collections.unmodifiableList(applicationsToCleanup_);
} else {
return applicationsToCleanupBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public int getApplicationsToCleanupCount() {
if (applicationsToCleanupBuilder_ == null) {
return applicationsToCleanup_.size();
} else {
return applicationsToCleanupBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationsToCleanup(int index) {
if (applicationsToCleanupBuilder_ == null) {
return applicationsToCleanup_.get(index);
} else {
return applicationsToCleanupBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder setApplicationsToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationsToCleanupBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureApplicationsToCleanupIsMutable();
applicationsToCleanup_.set(index, value);
onChanged();
} else {
applicationsToCleanupBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder setApplicationsToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationsToCleanupBuilder_ == null) {
ensureApplicationsToCleanupIsMutable();
applicationsToCleanup_.set(index, builderForValue.build());
onChanged();
} else {
applicationsToCleanupBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder addApplicationsToCleanup(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationsToCleanupBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureApplicationsToCleanupIsMutable();
applicationsToCleanup_.add(value);
onChanged();
} else {
applicationsToCleanupBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder addApplicationsToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (applicationsToCleanupBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureApplicationsToCleanupIsMutable();
applicationsToCleanup_.add(index, value);
onChanged();
} else {
applicationsToCleanupBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder addApplicationsToCleanup(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationsToCleanupBuilder_ == null) {
ensureApplicationsToCleanupIsMutable();
applicationsToCleanup_.add(builderForValue.build());
onChanged();
} else {
applicationsToCleanupBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder addApplicationsToCleanup(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (applicationsToCleanupBuilder_ == null) {
ensureApplicationsToCleanupIsMutable();
applicationsToCleanup_.add(index, builderForValue.build());
onChanged();
} else {
applicationsToCleanupBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder addAllApplicationsToCleanup(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto> values) {
if (applicationsToCleanupBuilder_ == null) {
ensureApplicationsToCleanupIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, applicationsToCleanup_);
onChanged();
} else {
applicationsToCleanupBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder clearApplicationsToCleanup() {
if (applicationsToCleanupBuilder_ == null) {
applicationsToCleanup_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
} else {
applicationsToCleanupBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public Builder removeApplicationsToCleanup(int index) {
if (applicationsToCleanupBuilder_ == null) {
ensureApplicationsToCleanupIsMutable();
applicationsToCleanup_.remove(index);
onChanged();
} else {
applicationsToCleanupBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationsToCleanupBuilder(
int index) {
return getApplicationsToCleanupFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationsToCleanupOrBuilder(
int index) {
if (applicationsToCleanupBuilder_ == null) {
return applicationsToCleanup_.get(index); } else {
return applicationsToCleanupBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationsToCleanupOrBuilderList() {
if (applicationsToCleanupBuilder_ != null) {
return applicationsToCleanupBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(applicationsToCleanup_);
}
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder addApplicationsToCleanupBuilder() {
return getApplicationsToCleanupFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder addApplicationsToCleanupBuilder(
int index) {
return getApplicationsToCleanupFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ApplicationIdProto applications_to_cleanup = 6;
*/
public java.util.List
getApplicationsToCleanupBuilderList() {
return getApplicationsToCleanupFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getApplicationsToCleanupFieldBuilder() {
if (applicationsToCleanupBuilder_ == null) {
applicationsToCleanupBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
applicationsToCleanup_,
((bitField0_ & 0x00000020) != 0),
getParentForChildren(),
isClean());
applicationsToCleanup_ = null;
}
return applicationsToCleanupBuilder_;
}
// Backing value for the optional nextHeartBeatInterval field (field 7);
// presence is tracked by bit 0x00000040 of bitField0_.
private long nextHeartBeatInterval_ ;
/**
* optional int64 nextHeartBeatInterval = 7;
* @return Whether the nextHeartBeatInterval field is set.
*/
@java.lang.Override
public boolean hasNextHeartBeatInterval() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional int64 nextHeartBeatInterval = 7;
* @return The nextHeartBeatInterval.
*/
@java.lang.Override
public long getNextHeartBeatInterval() {
return nextHeartBeatInterval_;
}
/**
* optional int64 nextHeartBeatInterval = 7;
* @param value The nextHeartBeatInterval to set.
* @return This builder for chaining.
*/
public Builder setNextHeartBeatInterval(long value) {
nextHeartBeatInterval_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* optional int64 nextHeartBeatInterval = 7;
* @return This builder for chaining.
*/
public Builder clearNextHeartBeatInterval() {
// Clearing resets both the presence bit and the value to the proto3-style
// default (0L) before notifying the parent of the change.
bitField0_ = (bitField0_ & ~0x00000040);
nextHeartBeatInterval_ = 0L;
onChanged();
return this;
}
// Backing value for the optional diagnostics_message field (field 8). Holds
// either a java.lang.String or a ByteString; the getters below lazily convert
// and cache between the two forms. Presence is tracked by bit 0x00000080.
private java.lang.Object diagnosticsMessage_ = "";
/**
* optional string diagnostics_message = 8;
* @return Whether the diagnosticsMessage field is set.
*/
public boolean hasDiagnosticsMessage() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional string diagnostics_message = 8;
* @return The diagnosticsMessage.
*/
public java.lang.String getDiagnosticsMessage() {
java.lang.Object ref = diagnosticsMessage_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8, so a
// malformed payload keeps its original bytes reachable via the bytes getter.
if (bs.isValidUtf8()) {
diagnosticsMessage_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string diagnostics_message = 8;
* @return The bytes for diagnosticsMessage.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsMessageBytes() {
java.lang.Object ref = diagnosticsMessage_;
if (ref instanceof String) {
// Encode once and cache the ByteString form in place of the String.
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
diagnosticsMessage_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string diagnostics_message = 8;
* @param value The diagnosticsMessage to set.
* @return This builder for chaining.
*/
public Builder setDiagnosticsMessage(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
diagnosticsMessage_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* optional string diagnostics_message = 8;
* @return This builder for chaining.
*/
public Builder clearDiagnosticsMessage() {
// Resets the field to the message's default value and clears the presence bit.
diagnosticsMessage_ = getDefaultInstance().getDiagnosticsMessage();
bitField0_ = (bitField0_ & ~0x00000080);
onChanged();
return this;
}
/**
* optional string diagnostics_message = 8;
* @param value The bytes for diagnosticsMessage to set.
* @return This builder for chaining.
*/
public Builder setDiagnosticsMessageBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
diagnosticsMessage_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
private java.util.List containersToBeRemovedFromNm_ =
java.util.Collections.emptyList();
private void ensureContainersToBeRemovedFromNmIsMutable() {
if (!((bitField0_ & 0x00000100) != 0)) {
containersToBeRemovedFromNm_ = new java.util.ArrayList(containersToBeRemovedFromNm_);
bitField0_ |= 0x00000100;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containersToBeRemovedFromNmBuilder_;
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public java.util.List getContainersToBeRemovedFromNmList() {
if (containersToBeRemovedFromNmBuilder_ == null) {
return java.util.Collections.unmodifiableList(containersToBeRemovedFromNm_);
} else {
return containersToBeRemovedFromNmBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public int getContainersToBeRemovedFromNmCount() {
if (containersToBeRemovedFromNmBuilder_ == null) {
return containersToBeRemovedFromNm_.size();
} else {
return containersToBeRemovedFromNmBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainersToBeRemovedFromNm(int index) {
if (containersToBeRemovedFromNmBuilder_ == null) {
return containersToBeRemovedFromNm_.get(index);
} else {
return containersToBeRemovedFromNmBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder setContainersToBeRemovedFromNm(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containersToBeRemovedFromNmBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToBeRemovedFromNmIsMutable();
containersToBeRemovedFromNm_.set(index, value);
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder setContainersToBeRemovedFromNm(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containersToBeRemovedFromNmBuilder_ == null) {
ensureContainersToBeRemovedFromNmIsMutable();
containersToBeRemovedFromNm_.set(index, builderForValue.build());
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder addContainersToBeRemovedFromNm(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containersToBeRemovedFromNmBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToBeRemovedFromNmIsMutable();
containersToBeRemovedFromNm_.add(value);
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder addContainersToBeRemovedFromNm(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containersToBeRemovedFromNmBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToBeRemovedFromNmIsMutable();
containersToBeRemovedFromNm_.add(index, value);
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder addContainersToBeRemovedFromNm(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containersToBeRemovedFromNmBuilder_ == null) {
ensureContainersToBeRemovedFromNmIsMutable();
containersToBeRemovedFromNm_.add(builderForValue.build());
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder addContainersToBeRemovedFromNm(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containersToBeRemovedFromNmBuilder_ == null) {
ensureContainersToBeRemovedFromNmIsMutable();
containersToBeRemovedFromNm_.add(index, builderForValue.build());
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder addAllContainersToBeRemovedFromNm(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto> values) {
if (containersToBeRemovedFromNmBuilder_ == null) {
ensureContainersToBeRemovedFromNmIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, containersToBeRemovedFromNm_);
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder clearContainersToBeRemovedFromNm() {
if (containersToBeRemovedFromNmBuilder_ == null) {
containersToBeRemovedFromNm_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000100);
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public Builder removeContainersToBeRemovedFromNm(int index) {
if (containersToBeRemovedFromNmBuilder_ == null) {
ensureContainersToBeRemovedFromNmIsMutable();
containersToBeRemovedFromNm_.remove(index);
onChanged();
} else {
containersToBeRemovedFromNmBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainersToBeRemovedFromNmBuilder(
int index) {
return getContainersToBeRemovedFromNmFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainersToBeRemovedFromNmOrBuilder(
int index) {
if (containersToBeRemovedFromNmBuilder_ == null) {
return containersToBeRemovedFromNm_.get(index); } else {
return containersToBeRemovedFromNmBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToBeRemovedFromNmOrBuilderList() {
if (containersToBeRemovedFromNmBuilder_ != null) {
return containersToBeRemovedFromNmBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(containersToBeRemovedFromNm_);
}
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainersToBeRemovedFromNmBuilder() {
return getContainersToBeRemovedFromNmFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainersToBeRemovedFromNmBuilder(
int index) {
return getContainersToBeRemovedFromNmFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.ContainerIdProto containers_to_be_removed_from_nm = 9;
*/
public java.util.List
getContainersToBeRemovedFromNmBuilderList() {
return getContainersToBeRemovedFromNmFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainersToBeRemovedFromNmFieldBuilder() {
if (containersToBeRemovedFromNmBuilder_ == null) {
containersToBeRemovedFromNmBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
containersToBeRemovedFromNm_,
((bitField0_ & 0x00000100) != 0),
getParentForChildren(),
isClean());
containersToBeRemovedFromNm_ = null;
}
return containersToBeRemovedFromNmBuilder_;
}
private java.util.List systemCredentialsForApps_ =
java.util.Collections.emptyList();
private void ensureSystemCredentialsForAppsIsMutable() {
if (!((bitField0_ & 0x00000200) != 0)) {
systemCredentialsForApps_ = new java.util.ArrayList(systemCredentialsForApps_);
bitField0_ |= 0x00000200;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder> systemCredentialsForAppsBuilder_;
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public java.util.List getSystemCredentialsForAppsList() {
if (systemCredentialsForAppsBuilder_ == null) {
return java.util.Collections.unmodifiableList(systemCredentialsForApps_);
} else {
return systemCredentialsForAppsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public int getSystemCredentialsForAppsCount() {
if (systemCredentialsForAppsBuilder_ == null) {
return systemCredentialsForApps_.size();
} else {
return systemCredentialsForAppsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto getSystemCredentialsForApps(int index) {
if (systemCredentialsForAppsBuilder_ == null) {
return systemCredentialsForApps_.get(index);
} else {
return systemCredentialsForAppsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder setSystemCredentialsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto value) {
if (systemCredentialsForAppsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSystemCredentialsForAppsIsMutable();
systemCredentialsForApps_.set(index, value);
onChanged();
} else {
systemCredentialsForAppsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder setSystemCredentialsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder builderForValue) {
if (systemCredentialsForAppsBuilder_ == null) {
ensureSystemCredentialsForAppsIsMutable();
systemCredentialsForApps_.set(index, builderForValue.build());
onChanged();
} else {
systemCredentialsForAppsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder addSystemCredentialsForApps(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto value) {
if (systemCredentialsForAppsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSystemCredentialsForAppsIsMutable();
systemCredentialsForApps_.add(value);
onChanged();
} else {
systemCredentialsForAppsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder addSystemCredentialsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto value) {
if (systemCredentialsForAppsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSystemCredentialsForAppsIsMutable();
systemCredentialsForApps_.add(index, value);
onChanged();
} else {
systemCredentialsForAppsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder addSystemCredentialsForApps(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder builderForValue) {
if (systemCredentialsForAppsBuilder_ == null) {
ensureSystemCredentialsForAppsIsMutable();
systemCredentialsForApps_.add(builderForValue.build());
onChanged();
} else {
systemCredentialsForAppsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder addSystemCredentialsForApps(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder builderForValue) {
if (systemCredentialsForAppsBuilder_ == null) {
ensureSystemCredentialsForAppsIsMutable();
systemCredentialsForApps_.add(index, builderForValue.build());
onChanged();
} else {
systemCredentialsForAppsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder addAllSystemCredentialsForApps(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto> values) {
if (systemCredentialsForAppsBuilder_ == null) {
ensureSystemCredentialsForAppsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, systemCredentialsForApps_);
onChanged();
} else {
systemCredentialsForAppsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder clearSystemCredentialsForApps() {
if (systemCredentialsForAppsBuilder_ == null) {
systemCredentialsForApps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000200);
onChanged();
} else {
systemCredentialsForAppsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public Builder removeSystemCredentialsForApps(int index) {
if (systemCredentialsForAppsBuilder_ == null) {
ensureSystemCredentialsForAppsIsMutable();
systemCredentialsForApps_.remove(index);
onChanged();
} else {
systemCredentialsForAppsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder getSystemCredentialsForAppsBuilder(
int index) {
return getSystemCredentialsForAppsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder getSystemCredentialsForAppsOrBuilder(
int index) {
if (systemCredentialsForAppsBuilder_ == null) {
return systemCredentialsForApps_.get(index); } else {
return systemCredentialsForAppsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder>
getSystemCredentialsForAppsOrBuilderList() {
if (systemCredentialsForAppsBuilder_ != null) {
return systemCredentialsForAppsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(systemCredentialsForApps_);
}
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder addSystemCredentialsForAppsBuilder() {
return getSystemCredentialsForAppsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder addSystemCredentialsForAppsBuilder(
int index) {
return getSystemCredentialsForAppsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.SystemCredentialsForAppsProto system_credentials_for_apps = 10;
*/
public java.util.List
getSystemCredentialsForAppsBuilderList() {
return getSystemCredentialsForAppsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder>
getSystemCredentialsForAppsFieldBuilder() {
if (systemCredentialsForAppsBuilder_ == null) {
systemCredentialsForAppsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder>(
systemCredentialsForApps_,
((bitField0_ & 0x00000200) != 0),
getParentForChildren(),
isClean());
systemCredentialsForApps_ = null;
}
return systemCredentialsForAppsBuilder_;
}
// Presence is tracked in bitField0_ via bit 0x00000400.
private boolean areNodeLabelsAcceptedByRM_ ;
/**
 * <code>optional bool areNodeLabelsAcceptedByRM = 11 [default = false];</code>
 * @return Whether the areNodeLabelsAcceptedByRM field is set.
 */
@java.lang.Override
public boolean hasAreNodeLabelsAcceptedByRM() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
 * <code>optional bool areNodeLabelsAcceptedByRM = 11 [default = false];</code>
 * @return The areNodeLabelsAcceptedByRM.
 */
@java.lang.Override
public boolean getAreNodeLabelsAcceptedByRM() {
return areNodeLabelsAcceptedByRM_;
}
/**
 * <code>optional bool areNodeLabelsAcceptedByRM = 11 [default = false];</code>
 * @param value The areNodeLabelsAcceptedByRM to set.
 * @return This builder for chaining.
 */
public Builder setAreNodeLabelsAcceptedByRM(boolean value) {
areNodeLabelsAcceptedByRM_ = value;
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
 * <code>optional bool areNodeLabelsAcceptedByRM = 11 [default = false];</code>
 * @return This builder for chaining.
 */
public Builder clearAreNodeLabelsAcceptedByRM() {
bitField0_ = (bitField0_ & ~0x00000400);
areNodeLabelsAcceptedByRM_ = false;
onChanged();
return this;
}
private java.util.List containersToDecrease_ =
java.util.Collections.emptyList();
private void ensureContainersToDecreaseIsMutable() {
if (!((bitField0_ & 0x00000800) != 0)) {
containersToDecrease_ = new java.util.ArrayList(containersToDecrease_);
bitField0_ |= 0x00000800;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containersToDecreaseBuilder_;
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public java.util.List getContainersToDecreaseList() {
if (containersToDecreaseBuilder_ == null) {
return java.util.Collections.unmodifiableList(containersToDecrease_);
} else {
return containersToDecreaseBuilder_.getMessageList();
}
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public int getContainersToDecreaseCount() {
if (containersToDecreaseBuilder_ == null) {
return containersToDecrease_.size();
} else {
return containersToDecreaseBuilder_.getCount();
}
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersToDecrease(int index) {
if (containersToDecreaseBuilder_ == null) {
return containersToDecrease_.get(index);
} else {
return containersToDecreaseBuilder_.getMessage(index);
}
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder setContainersToDecrease(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (containersToDecreaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToDecreaseIsMutable();
containersToDecrease_.set(index, value);
onChanged();
} else {
containersToDecreaseBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder setContainersToDecrease(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (containersToDecreaseBuilder_ == null) {
ensureContainersToDecreaseIsMutable();
containersToDecrease_.set(index, builderForValue.build());
onChanged();
} else {
containersToDecreaseBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder addContainersToDecrease(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (containersToDecreaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToDecreaseIsMutable();
containersToDecrease_.add(value);
onChanged();
} else {
containersToDecreaseBuilder_.addMessage(value);
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder addContainersToDecrease(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (containersToDecreaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToDecreaseIsMutable();
containersToDecrease_.add(index, value);
onChanged();
} else {
containersToDecreaseBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder addContainersToDecrease(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (containersToDecreaseBuilder_ == null) {
ensureContainersToDecreaseIsMutable();
containersToDecrease_.add(builderForValue.build());
onChanged();
} else {
containersToDecreaseBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder addContainersToDecrease(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (containersToDecreaseBuilder_ == null) {
ensureContainersToDecreaseIsMutable();
containersToDecrease_.add(index, builderForValue.build());
onChanged();
} else {
containersToDecreaseBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder addAllContainersToDecrease(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> values) {
if (containersToDecreaseBuilder_ == null) {
ensureContainersToDecreaseIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, containersToDecrease_);
onChanged();
} else {
containersToDecreaseBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder clearContainersToDecrease() {
if (containersToDecreaseBuilder_ == null) {
containersToDecrease_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000800);
onChanged();
} else {
containersToDecreaseBuilder_.clear();
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public Builder removeContainersToDecrease(int index) {
if (containersToDecreaseBuilder_ == null) {
ensureContainersToDecreaseIsMutable();
containersToDecrease_.remove(index);
onChanged();
} else {
containersToDecreaseBuilder_.remove(index);
}
return this;
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainersToDecreaseBuilder(
int index) {
return getContainersToDecreaseFieldBuilder().getBuilder(index);
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersToDecreaseOrBuilder(
int index) {
if (containersToDecreaseBuilder_ == null) {
return containersToDecrease_.get(index); } else {
return containersToDecreaseBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToDecreaseOrBuilderList() {
if (containersToDecreaseBuilder_ != null) {
return containersToDecreaseBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(containersToDecrease_);
}
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersToDecreaseBuilder() {
return getContainersToDecreaseFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersToDecreaseBuilder(
int index) {
return getContainersToDecreaseFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
}
/**
*
* to be deprecated in favour of containers_to_update
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_decrease = 12;
*/
public java.util.List
getContainersToDecreaseBuilderList() {
return getContainersToDecreaseFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToDecreaseFieldBuilder() {
if (containersToDecreaseBuilder_ == null) {
containersToDecreaseBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
containersToDecrease_,
((bitField0_ & 0x00000800) != 0),
getParentForChildren(),
isClean());
containersToDecrease_ = null;
}
return containersToDecreaseBuilder_;
}
private java.util.List containersToSignal_ =
java.util.Collections.emptyList();
private void ensureContainersToSignalIsMutable() {
if (!((bitField0_ & 0x00001000) != 0)) {
containersToSignal_ = new java.util.ArrayList(containersToSignal_);
bitField0_ |= 0x00001000;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder> containersToSignalBuilder_;
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public java.util.List getContainersToSignalList() {
if (containersToSignalBuilder_ == null) {
return java.util.Collections.unmodifiableList(containersToSignal_);
} else {
return containersToSignalBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public int getContainersToSignalCount() {
if (containersToSignalBuilder_ == null) {
return containersToSignal_.size();
} else {
return containersToSignalBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getContainersToSignal(int index) {
if (containersToSignalBuilder_ == null) {
return containersToSignal_.get(index);
} else {
return containersToSignalBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder setContainersToSignal(
int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto value) {
if (containersToSignalBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToSignalIsMutable();
containersToSignal_.set(index, value);
onChanged();
} else {
containersToSignalBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder setContainersToSignal(
int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder builderForValue) {
if (containersToSignalBuilder_ == null) {
ensureContainersToSignalIsMutable();
containersToSignal_.set(index, builderForValue.build());
onChanged();
} else {
containersToSignalBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder addContainersToSignal(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto value) {
if (containersToSignalBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToSignalIsMutable();
containersToSignal_.add(value);
onChanged();
} else {
containersToSignalBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder addContainersToSignal(
int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto value) {
if (containersToSignalBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToSignalIsMutable();
containersToSignal_.add(index, value);
onChanged();
} else {
containersToSignalBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder addContainersToSignal(
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder builderForValue) {
if (containersToSignalBuilder_ == null) {
ensureContainersToSignalIsMutable();
containersToSignal_.add(builderForValue.build());
onChanged();
} else {
containersToSignalBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder addContainersToSignal(
int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder builderForValue) {
if (containersToSignalBuilder_ == null) {
ensureContainersToSignalIsMutable();
containersToSignal_.add(index, builderForValue.build());
onChanged();
} else {
containersToSignalBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder addAllContainersToSignal(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto> values) {
if (containersToSignalBuilder_ == null) {
ensureContainersToSignalIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, containersToSignal_);
onChanged();
} else {
containersToSignalBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder clearContainersToSignal() {
if (containersToSignalBuilder_ == null) {
containersToSignal_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00001000);
onChanged();
} else {
containersToSignalBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public Builder removeContainersToSignal(int index) {
if (containersToSignalBuilder_ == null) {
ensureContainersToSignalIsMutable();
containersToSignal_.remove(index);
onChanged();
} else {
containersToSignalBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder getContainersToSignalBuilder(
int index) {
return getContainersToSignalFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder getContainersToSignalOrBuilder(
int index) {
if (containersToSignalBuilder_ == null) {
return containersToSignal_.get(index); } else {
return containersToSignalBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder>
getContainersToSignalOrBuilderList() {
if (containersToSignalBuilder_ != null) {
return containersToSignalBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(containersToSignal_);
}
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder addContainersToSignalBuilder() {
return getContainersToSignalFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder addContainersToSignalBuilder(
int index) {
return getContainersToSignalFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.SignalContainerRequestProto containers_to_signal = 13;
*/
public java.util.List
getContainersToSignalBuilderList() {
return getContainersToSignalFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder>
getContainersToSignalFieldBuilder() {
if (containersToSignalBuilder_ == null) {
containersToSignalBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder>(
containersToSignal_,
((bitField0_ & 0x00001000) != 0),
getParentForChildren(),
isClean());
containersToSignal_ = null;
}
return containersToSignalBuilder_;
}
// Singular message field; presence is tracked in bitField0_ via bit 0x00002000.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 * @return Whether the resource field is set.
 */
public boolean hasResource() {
return ((bitField0_ & 0x00002000) != 0);
}
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 * @return The resource.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
if (resourceBuilder_ == null) {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
} else {
return resourceBuilder_.getMessage();
}
}
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
} else {
resourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
public Builder setResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (resourceBuilder_ == null) {
resource_ = builderForValue.build();
} else {
resourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
 * Merges {@code value} into any previously-set resource (field-wise protobuf
 * merge); if the field was unset or still the default instance, {@code value}
 * replaces it outright.
 *
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (((bitField0_ & 0x00002000) != 0) &&
resource_ != null &&
resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getResourceBuilder().mergeFrom(value);
} else {
resource_ = value;
}
} else {
resourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
public Builder clearResource() {
bitField0_ = (bitField0_ & ~0x00002000);
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
onChanged();
return this;
}
/**
 * Returns a mutable builder for the field, marking it as set.
 *
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
bitField0_ |= 0x00002000;
onChanged();
return getResourceFieldBuilder().getBuilder();
}
/**
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
if (resourceBuilder_ != null) {
return resourceBuilder_.getMessageOrBuilder();
} else {
return resource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
}
/**
 * Lazily creates the single-field builder view; after creation the plain
 * message reference is nulled and all access is routed through the builder.
 *
 * <code>optional .hadoop.yarn.ResourceProto resource = 14;</code>
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getResourceFieldBuilder() {
if (resourceBuilder_ == null) {
resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getResource(),
getParentForChildren(),
isClean());
resource_ = null;
}
return resourceBuilder_;
}
// Singular message field; presence is tracked in bitField0_ via bit 0x00004000.
private org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto containerQueuingLimit_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProtoOrBuilder> containerQueuingLimitBuilder_;
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 * @return Whether the containerQueuingLimit field is set.
 */
public boolean hasContainerQueuingLimit() {
return ((bitField0_ & 0x00004000) != 0);
}
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 * @return The containerQueuingLimit.
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto getContainerQueuingLimit() {
if (containerQueuingLimitBuilder_ == null) {
return containerQueuingLimit_ == null ? org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.getDefaultInstance() : containerQueuingLimit_;
} else {
return containerQueuingLimitBuilder_.getMessage();
}
}
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
public Builder setContainerQueuingLimit(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto value) {
if (containerQueuingLimitBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
containerQueuingLimit_ = value;
} else {
containerQueuingLimitBuilder_.setMessage(value);
}
bitField0_ |= 0x00004000;
onChanged();
return this;
}
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
public Builder setContainerQueuingLimit(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.Builder builderForValue) {
if (containerQueuingLimitBuilder_ == null) {
containerQueuingLimit_ = builderForValue.build();
} else {
containerQueuingLimitBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00004000;
onChanged();
return this;
}
/**
 * Merges {@code value} into any previously-set limit (field-wise protobuf
 * merge); if the field was unset or still the default instance, {@code value}
 * replaces it outright.
 *
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
public Builder mergeContainerQueuingLimit(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto value) {
if (containerQueuingLimitBuilder_ == null) {
if (((bitField0_ & 0x00004000) != 0) &&
containerQueuingLimit_ != null &&
containerQueuingLimit_ != org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.getDefaultInstance()) {
getContainerQueuingLimitBuilder().mergeFrom(value);
} else {
containerQueuingLimit_ = value;
}
} else {
containerQueuingLimitBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00004000;
onChanged();
return this;
}
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
public Builder clearContainerQueuingLimit() {
bitField0_ = (bitField0_ & ~0x00004000);
containerQueuingLimit_ = null;
if (containerQueuingLimitBuilder_ != null) {
containerQueuingLimitBuilder_.dispose();
containerQueuingLimitBuilder_ = null;
}
onChanged();
return this;
}
/**
 * Returns a mutable builder for the field, marking it as set.
 *
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.Builder getContainerQueuingLimitBuilder() {
bitField0_ |= 0x00004000;
onChanged();
return getContainerQueuingLimitFieldBuilder().getBuilder();
}
/**
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProtoOrBuilder getContainerQueuingLimitOrBuilder() {
if (containerQueuingLimitBuilder_ != null) {
return containerQueuingLimitBuilder_.getMessageOrBuilder();
} else {
return containerQueuingLimit_ == null ?
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.getDefaultInstance() : containerQueuingLimit_;
}
}
/**
 * Lazily creates the single-field builder view; after creation the plain
 * message reference is nulled and all access is routed through the builder.
 *
 * <code>optional .hadoop.yarn.ContainerQueuingLimitProto container_queuing_limit = 15;</code>
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProtoOrBuilder>
getContainerQueuingLimitFieldBuilder() {
if (containerQueuingLimitBuilder_ == null) {
containerQueuingLimitBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProtoOrBuilder>(
getContainerQueuingLimit(),
getParentForChildren(),
isClean());
containerQueuingLimit_ = null;
}
return containerQueuingLimitBuilder_;
}
private java.util.List appCollectors_ =
java.util.Collections.emptyList();
private void ensureAppCollectorsIsMutable() {
if (!((bitField0_ & 0x00008000) != 0)) {
appCollectors_ = new java.util.ArrayList(appCollectors_);
bitField0_ |= 0x00008000;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder> appCollectorsBuilder_;
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public java.util.List getAppCollectorsList() {
if (appCollectorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(appCollectors_);
} else {
return appCollectorsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public int getAppCollectorsCount() {
if (appCollectorsBuilder_ == null) {
return appCollectors_.size();
} else {
return appCollectorsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getAppCollectors(int index) {
if (appCollectorsBuilder_ == null) {
return appCollectors_.get(index);
} else {
return appCollectorsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder setAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
if (appCollectorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppCollectorsIsMutable();
appCollectors_.set(index, value);
onChanged();
} else {
appCollectorsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder setAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.set(index, builderForValue.build());
onChanged();
} else {
appCollectorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder addAppCollectors(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
if (appCollectorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppCollectorsIsMutable();
appCollectors_.add(value);
onChanged();
} else {
appCollectorsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder addAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
if (appCollectorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppCollectorsIsMutable();
appCollectors_.add(index, value);
onChanged();
} else {
appCollectorsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder addAppCollectors(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.add(builderForValue.build());
onChanged();
} else {
appCollectorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder addAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.add(index, builderForValue.build());
onChanged();
} else {
appCollectorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder addAllAppCollectors(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto> values) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, appCollectors_);
onChanged();
} else {
appCollectorsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder clearAppCollectors() {
if (appCollectorsBuilder_ == null) {
appCollectors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00008000);
onChanged();
} else {
appCollectorsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public Builder removeAppCollectors(int index) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.remove(index);
onChanged();
} else {
appCollectorsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder getAppCollectorsBuilder(
int index) {
return getAppCollectorsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getAppCollectorsOrBuilder(
int index) {
if (appCollectorsBuilder_ == null) {
return appCollectors_.get(index); } else {
return appCollectorsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsOrBuilderList() {
if (appCollectorsBuilder_ != null) {
return appCollectorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(appCollectors_);
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder addAppCollectorsBuilder() {
return getAppCollectorsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder addAppCollectorsBuilder(
int index) {
return getAppCollectorsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance());
}
/**
 * Returns live builders for every element of the repeated
 * {@code app_collectors} field, materializing the field builder on demand.
 *
 * <code>repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 16;</code>
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder>
     getAppCollectorsBuilderList() {
  // NOTE(review): restored the `<AppCollectorDataProto.Builder>` element type
  // stripped by extraction; the raw List would not match the generator output.
  return getAppCollectorsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsFieldBuilder() {
if (appCollectorsBuilder_ == null) {
appCollectorsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>(
appCollectors_,
((bitField0_ & 0x00008000) != 0),
getParentForChildren(),
isClean());
appCollectors_ = null;
}
return appCollectorsBuilder_;
}
// Backing list for the repeated `containers_to_update` field (tag 17).
// Starts as the shared immutable empty list; ensureContainersToUpdateIsMutable()
// copy-on-writes it into an ArrayList before the first in-place mutation and
// records mutability in bit 0x00010000 of bitField0_.
// NOTE(review): restored the `<ContainerProto>` type arguments lost in extraction.
private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> containersToUpdate_ =
  java.util.Collections.emptyList();
private void ensureContainersToUpdateIsMutable() {
  if (!((bitField0_ & 0x00010000) != 0)) {
    containersToUpdate_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>(containersToUpdate_);
    bitField0_ |= 0x00010000;
  }
}
// Lazily-created field builder for `containers_to_update`; null until a
// builder-returning accessor forces its creation, after which it owns the data.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containersToUpdateBuilder_;
/**
 * <pre>
 * to be used in place of containers_to_decrease
 * </pre>
 *
 * <code>repeated .hadoop.yarn.ContainerProto containers_to_update = 17;</code>
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> getContainersToUpdateList() {
  // NOTE(review): restored the `<ContainerProto>` return-type argument lost in extraction.
  if (containersToUpdateBuilder_ == null) {
    return java.util.Collections.unmodifiableList(containersToUpdate_);
  } else {
    return containersToUpdateBuilder_.getMessageList();
  }
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public int getContainersToUpdateCount() {
if (containersToUpdateBuilder_ == null) {
return containersToUpdate_.size();
} else {
return containersToUpdateBuilder_.getCount();
}
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersToUpdate(int index) {
if (containersToUpdateBuilder_ == null) {
return containersToUpdate_.get(index);
} else {
return containersToUpdateBuilder_.getMessage(index);
}
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder setContainersToUpdate(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (containersToUpdateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToUpdateIsMutable();
containersToUpdate_.set(index, value);
onChanged();
} else {
containersToUpdateBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder setContainersToUpdate(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (containersToUpdateBuilder_ == null) {
ensureContainersToUpdateIsMutable();
containersToUpdate_.set(index, builderForValue.build());
onChanged();
} else {
containersToUpdateBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder addContainersToUpdate(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (containersToUpdateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToUpdateIsMutable();
containersToUpdate_.add(value);
onChanged();
} else {
containersToUpdateBuilder_.addMessage(value);
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder addContainersToUpdate(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
if (containersToUpdateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureContainersToUpdateIsMutable();
containersToUpdate_.add(index, value);
onChanged();
} else {
containersToUpdateBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder addContainersToUpdate(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (containersToUpdateBuilder_ == null) {
ensureContainersToUpdateIsMutable();
containersToUpdate_.add(builderForValue.build());
onChanged();
} else {
containersToUpdateBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder addContainersToUpdate(
int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
if (containersToUpdateBuilder_ == null) {
ensureContainersToUpdateIsMutable();
containersToUpdate_.add(index, builderForValue.build());
onChanged();
} else {
containersToUpdateBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder addAllContainersToUpdate(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto> values) {
if (containersToUpdateBuilder_ == null) {
ensureContainersToUpdateIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, containersToUpdate_);
onChanged();
} else {
containersToUpdateBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder clearContainersToUpdate() {
if (containersToUpdateBuilder_ == null) {
containersToUpdate_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00010000);
onChanged();
} else {
containersToUpdateBuilder_.clear();
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public Builder removeContainersToUpdate(int index) {
if (containersToUpdateBuilder_ == null) {
ensureContainersToUpdateIsMutable();
containersToUpdate_.remove(index);
onChanged();
} else {
containersToUpdateBuilder_.remove(index);
}
return this;
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainersToUpdateBuilder(
int index) {
return getContainersToUpdateFieldBuilder().getBuilder(index);
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersToUpdateOrBuilder(
int index) {
if (containersToUpdateBuilder_ == null) {
return containersToUpdate_.get(index); } else {
return containersToUpdateBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToUpdateOrBuilderList() {
if (containersToUpdateBuilder_ != null) {
return containersToUpdateBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(containersToUpdate_);
}
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersToUpdateBuilder() {
return getContainersToUpdateFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
}
/**
*
* to be used in place of containers_to_decrease
*
*
* repeated .hadoop.yarn.ContainerProto containers_to_update = 17;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersToUpdateBuilder(
int index) {
return getContainersToUpdateFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance());
}
/**
 * Returns live builders for every element of the repeated
 * {@code containers_to_update} field, materializing the field builder on demand.
 *
 * <pre>
 * to be used in place of containers_to_decrease
 * </pre>
 *
 * <code>repeated .hadoop.yarn.ContainerProto containers_to_update = 17;</code>
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder>
     getContainersToUpdateBuilderList() {
  // NOTE(review): restored the `<ContainerProto.Builder>` element type lost in extraction.
  return getContainersToUpdateFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
getContainersToUpdateFieldBuilder() {
if (containersToUpdateBuilder_ == null) {
containersToUpdateBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
containersToUpdate_,
((bitField0_ & 0x00010000) != 0),
getParentForChildren(),
isClean());
containersToUpdate_ = null;
}
return containersToUpdateBuilder_;
}
private boolean areNodeAttributesAcceptedByRM_ ;
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @return Whether the areNodeAttributesAcceptedByRM field is set.
*/
@java.lang.Override
public boolean hasAreNodeAttributesAcceptedByRM() {
return ((bitField0_ & 0x00020000) != 0);
}
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @return The areNodeAttributesAcceptedByRM.
*/
@java.lang.Override
public boolean getAreNodeAttributesAcceptedByRM() {
return areNodeAttributesAcceptedByRM_;
}
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @param value The areNodeAttributesAcceptedByRM to set.
* @return This builder for chaining.
*/
public Builder setAreNodeAttributesAcceptedByRM(boolean value) {
areNodeAttributesAcceptedByRM_ = value;
bitField0_ |= 0x00020000;
onChanged();
return this;
}
/**
* optional bool areNodeAttributesAcceptedByRM = 18 [default = false];
* @return This builder for chaining.
*/
public Builder clearAreNodeAttributesAcceptedByRM() {
bitField0_ = (bitField0_ & ~0x00020000);
areNodeAttributesAcceptedByRM_ = false;
onChanged();
return this;
}
private long tokenSequenceNo_ ;
/**
* optional int64 tokenSequenceNo = 19;
* @return Whether the tokenSequenceNo field is set.
*/
@java.lang.Override
public boolean hasTokenSequenceNo() {
return ((bitField0_ & 0x00040000) != 0);
}
/**
* optional int64 tokenSequenceNo = 19;
* @return The tokenSequenceNo.
*/
@java.lang.Override
public long getTokenSequenceNo() {
return tokenSequenceNo_;
}
/**
* optional int64 tokenSequenceNo = 19;
* @param value The tokenSequenceNo to set.
* @return This builder for chaining.
*/
public Builder setTokenSequenceNo(long value) {
tokenSequenceNo_ = value;
bitField0_ |= 0x00040000;
onChanged();
return this;
}
/**
* optional int64 tokenSequenceNo = 19;
* @return This builder for chaining.
*/
public Builder clearTokenSequenceNo() {
bitField0_ = (bitField0_ & ~0x00040000);
tokenSequenceNo_ = 0L;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeHeartbeatResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeHeartbeatResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Wire-format parser for {@code NodeHeartbeatResponseProto}. Deprecated in
 * generated code in favor of {@link #parser()}. Partial parses preserve what
 * was read so far by attaching {@code builder.buildPartial()} to the thrown
 * InvalidProtocolBufferException as the unfinished message.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeHeartbeatResponseProto>
    // NOTE(review): restored the `<NodeHeartbeatResponseProto>` type arguments
    // on Parser/AbstractParser that were stripped during extraction.
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeHeartbeatResponseProto>() {
  @java.lang.Override
  public NodeHeartbeatResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      // Wrap plain I/O failures so callers see a single protobuf exception type.
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/** Returns the singleton parser for this message type. */
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeHeartbeatResponseProto> parser() {
  // NOTE(review): restored the `<NodeHeartbeatResponseProto>` type argument lost in extraction.
  return PARSER;
}
/** Returns the singleton parser; same instance as {@link #parser()}. */
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NodeHeartbeatResponseProto> getParserForType() {
  // NOTE(review): restored the `<NodeHeartbeatResponseProto>` type argument lost in extraction.
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/**
 * Generated accessor view for the {@code hadoop.yarn.ContainerQueuingLimitProto}
 * message; implemented by both the immutable message and its Builder.
 */
public interface ContainerQueuingLimitProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerQueuingLimitProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
 * <code>optional int32 max_queue_length = 1;</code>
 * @return Whether the maxQueueLength field is set.
 */
boolean hasMaxQueueLength();
/**
 * <code>optional int32 max_queue_length = 1;</code>
 * @return The maxQueueLength (0 when unset).
 */
int getMaxQueueLength();
/**
 * <code>optional int32 max_queue_wait_time_in_ms = 2;</code>
 * @return Whether the maxQueueWaitTimeInMs field is set.
 */
boolean hasMaxQueueWaitTimeInMs();
/**
 * <code>optional int32 max_queue_wait_time_in_ms = 2;</code>
 * @return The maxQueueWaitTimeInMs (0 when unset).
 */
int getMaxQueueWaitTimeInMs();
}
/**
* Protobuf type {@code hadoop.yarn.ContainerQueuingLimitProto}
*/
public static final class ContainerQueuingLimitProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerQueuingLimitProto)
ContainerQueuingLimitProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ContainerQueuingLimitProto.newBuilder() to construct.
// NOTE(review): restored the `<?>` wildcard on the builder parameter that was
// stripped during extraction.
private ContainerQueuingLimitProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used by newInstance(); fields keep their defaults.
private ContainerQueuingLimitProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ContainerQueuingLimitProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ContainerQueuingLimitProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ContainerQueuingLimitProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.Builder.class);
}
private int bitField0_;
public static final int MAX_QUEUE_LENGTH_FIELD_NUMBER = 1;
private int maxQueueLength_ = 0;
/**
* optional int32 max_queue_length = 1;
* @return Whether the maxQueueLength field is set.
*/
@java.lang.Override
public boolean hasMaxQueueLength() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional int32 max_queue_length = 1;
* @return The maxQueueLength.
*/
@java.lang.Override
public int getMaxQueueLength() {
return maxQueueLength_;
}
public static final int MAX_QUEUE_WAIT_TIME_IN_MS_FIELD_NUMBER = 2;
private int maxQueueWaitTimeInMs_ = 0;
/**
* optional int32 max_queue_wait_time_in_ms = 2;
* @return Whether the maxQueueWaitTimeInMs field is set.
*/
@java.lang.Override
public boolean hasMaxQueueWaitTimeInMs() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int32 max_queue_wait_time_in_ms = 2;
* @return The maxQueueWaitTimeInMs.
*/
@java.lang.Override
public int getMaxQueueWaitTimeInMs() {
return maxQueueWaitTimeInMs_;
}
// Memoized initialization state: -1 = not yet computed, 0 = known
// uninitialized, 1 = known initialized.
private byte memoizedIsInitialized = -1;
// This message declares no required fields, so it is always initialized;
// the result is cached on first call.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes only fields whose presence bit is set in bitField0_
// (bit 0x1 = max_queue_length, bit 0x2 = max_queue_wait_time_in_ms),
// then any unknown fields carried over from parsing.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(1, maxQueueLength_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt32(2, maxQueueWaitTimeInMs_);
}
getUnknownFields().writeTo(output);
}
// Computes the wire size of the fields writeTo() would emit; cached in
// memoizedSize (-1 means not yet computed) since the message is immutable.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(1, maxQueueLength_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(2, maxQueueWaitTimeInMs_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Generated value equality: messages are equal iff the same optional fields
// are present, each present field holds an equal value, and the unknown
// field sets match. Non-message types fall back to super.equals.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto) obj;
if (hasMaxQueueLength() != other.hasMaxQueueLength()) return false;
if (hasMaxQueueLength()) {
if (getMaxQueueLength()
!= other.getMaxQueueLength()) return false;
}
if (hasMaxQueueWaitTimeInMs() != other.hasMaxQueueWaitTimeInMs()) return false;
if (hasMaxQueueWaitTimeInMs()) {
if (getMaxQueueWaitTimeInMs()
!= other.getMaxQueueWaitTimeInMs()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash consistent with equals(): mixes the descriptor, each PRESENT field
// (tagged by its field number via the generator's 37/53 multipliers), and
// the unknown fields. Cached in memoizedHashCode (0 = not yet computed;
// note a legitimately-zero hash would be recomputed each call).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMaxQueueLength()) {
hash = (37 * hash) + MAX_QUEUE_LENGTH_FIELD_NUMBER;
hash = (53 * hash) + getMaxQueueLength();
}
if (hasMaxQueueWaitTimeInMs()) {
hash = (37 * hash) + MAX_QUEUE_WAIT_TIME_IN_MS_FIELD_NUMBER;
hash = (53 * hash) + getMaxQueueWaitTimeInMs();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ContainerQueuingLimitProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerQueuingLimitProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ContainerQueuingLimitProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ContainerQueuingLimitProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
maxQueueLength_ = 0;
maxQueueWaitTimeInMs_ = 0;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ContainerQueuingLimitProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies each field whose presence bit is set in the builder's bitField0_
// into the freshly constructed message, then ORs the collected presence
// bits into the message's own bitField0_ in one write.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.maxQueueLength_ = maxQueueLength_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.maxQueueWaitTimeInMs_ = maxQueueWaitTimeInMs_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
// Boilerplate overrides: each simply delegates to the
// GeneratedMessageV3.Builder superclass implementation.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic merge: dispatches to the typed overload when possible,
// otherwise falls back to reflective field-by-field merging.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: copies only the fields that are set on {@code other};
// merging the default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto.getDefaultInstance()) return this;
if (other.hasMaxQueueLength()) {
setMaxQueueLength(other.getMaxQueueLength());
}
if (other.hasMaxQueueWaitTimeInMs()) {
setMaxQueueWaitTimeInMs(other.getMaxQueueWaitTimeInMs());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// All fields are optional, so the builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire format directly into this builder.
// Tags: 8 = field 1 (int32 max_queue_length), 16 = field 2
// (int32 max_queue_wait_time_in_ms); tag 0 means end of stream.
// Unknown fields are preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
maxQueueLength_ = input.readInt32();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16: {
maxQueueWaitTimeInMs_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on partial parses.
onChanged();
} // finally
return this;
}
// Tracks which optional fields have been explicitly set (proto2 has-bits).
private int bitField0_;
private int maxQueueLength_ ;
/**
* <code>optional int32 max_queue_length = 1;</code>
* @return Whether the maxQueueLength field is set.
*/
@java.lang.Override
public boolean hasMaxQueueLength() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <code>optional int32 max_queue_length = 1;</code>
* @return The maxQueueLength.
*/
@java.lang.Override
public int getMaxQueueLength() {
return maxQueueLength_;
}
/**
* <code>optional int32 max_queue_length = 1;</code>
* @param value The maxQueueLength to set.
* @return This builder for chaining.
*/
public Builder setMaxQueueLength(int value) {
maxQueueLength_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <code>optional int32 max_queue_length = 1;</code>
* @return This builder for chaining.
*/
public Builder clearMaxQueueLength() {
bitField0_ = (bitField0_ & ~0x00000001);
maxQueueLength_ = 0;
onChanged();
return this;
}
private int maxQueueWaitTimeInMs_ ;
/**
* <code>optional int32 max_queue_wait_time_in_ms = 2;</code>
* @return Whether the maxQueueWaitTimeInMs field is set.
*/
@java.lang.Override
public boolean hasMaxQueueWaitTimeInMs() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional int32 max_queue_wait_time_in_ms = 2;</code>
* @return The maxQueueWaitTimeInMs.
*/
@java.lang.Override
public int getMaxQueueWaitTimeInMs() {
return maxQueueWaitTimeInMs_;
}
/**
* <code>optional int32 max_queue_wait_time_in_ms = 2;</code>
* @param value The maxQueueWaitTimeInMs to set.
* @return This builder for chaining.
*/
public Builder setMaxQueueWaitTimeInMs(int value) {
maxQueueWaitTimeInMs_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional int32 max_queue_wait_time_in_ms = 2;</code>
* @return This builder for chaining.
*/
public Builder clearMaxQueueWaitTimeInMs() {
bitField0_ = (bitField0_ & ~0x00000002);
maxQueueWaitTimeInMs_ = 0;
onChanged();
return this;
}
// Unknown-field handling delegates to the superclass unchanged.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerQueuingLimitProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerQueuingLimitProto)
// Singleton default instance; all unset message fields resolve to it.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated in favor of parser(); kept for generated-code compatibility.
// NOTE: the generic type arguments below were stripped to raw types by the
// HTML extraction of this file; restored to protoc's generated form.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerQueuingLimitProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerQueuingLimitProto>() {
@java.lang.Override
public ContainerQueuingLimitProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
// Attach whatever was parsed so far so callers can inspect it.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Restored the generic return types (Parser<ContainerQueuingLimitProto>)
// that the HTML extraction stripped to raw Parser.
public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerQueuingLimitProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerQueuingLimitProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ContainerQueuingLimitProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only view over SystemCredentialsForAppsProto, implemented by both
// the message and its Builder.
public interface SystemCredentialsForAppsProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SystemCredentialsForAppsProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional .hadoop.yarn.ApplicationIdProto appId = 1;</code>
* @return Whether the appId field is set.
*/
boolean hasAppId();
/**
* <code>optional .hadoop.yarn.ApplicationIdProto appId = 1;</code>
* @return The appId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId();
/**
* <code>optional .hadoop.yarn.ApplicationIdProto appId = 1;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder();
/**
* <code>optional bytes credentialsForApp = 2;</code>
* @return Whether the credentialsForApp field is set.
*/
boolean hasCredentialsForApp();
/**
* <code>optional bytes credentialsForApp = 2;</code>
* @return The credentialsForApp.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getCredentialsForApp();
}
/**
* Protobuf type {@code hadoop.yarn.SystemCredentialsForAppsProto}
*/
public static final class SystemCredentialsForAppsProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SystemCredentialsForAppsProto)
SystemCredentialsForAppsProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SystemCredentialsForAppsProto.newBuilder() to construct.
private SystemCredentialsForAppsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private SystemCredentialsForAppsProto() {
credentialsForApp_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SystemCredentialsForAppsProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SystemCredentialsForAppsProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SystemCredentialsForAppsProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder.class);
}
private int bitField0_;
public static final int APPID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* @return Whether the appId field is set.
*/
@java.lang.Override
public boolean hasAppId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* @return The appId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
public static final int CREDENTIALSFORAPP_FIELD_NUMBER = 2;
private org.apache.hadoop.thirdparty.protobuf.ByteString credentialsForApp_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes credentialsForApp = 2;
* @return Whether the credentialsForApp field is set.
*/
@java.lang.Override
public boolean hasCredentialsForApp() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bytes credentialsForApp = 2;
* @return The credentialsForApp.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getCredentialsForApp() {
return credentialsForApp_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeBytes(2, credentialsForApp_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(2, credentialsForApp_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto) obj;
if (hasAppId() != other.hasAppId()) return false;
if (hasAppId()) {
if (!getAppId()
.equals(other.getAppId())) return false;
}
if (hasCredentialsForApp() != other.hasCredentialsForApp()) return false;
if (hasCredentialsForApp()) {
if (!getCredentialsForApp()
.equals(other.getCredentialsForApp())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppId()) {
hash = (37 * hash) + APPID_FIELD_NUMBER;
hash = (53 * hash) + getAppId().hashCode();
}
if (hasCredentialsForApp()) {
hash = (37 * hash) + CREDENTIALSFORAPP_FIELD_NUMBER;
hash = (53 * hash) + getCredentialsForApp().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SystemCredentialsForAppsProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SystemCredentialsForAppsProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SystemCredentialsForAppsProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SystemCredentialsForAppsProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appId_ = null;
if (appIdBuilder_ != null) {
appIdBuilder_.dispose();
appIdBuilder_ = null;
}
credentialsForApp_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SystemCredentialsForAppsProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appId_ = appIdBuilder_ == null
? appId_
: appIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.credentialsForApp_ = credentialsForApp_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto.getDefaultInstance()) return this;
if (other.hasAppId()) {
mergeAppId(other.getAppId());
}
if (other.hasCredentialsForApp()) {
setCredentialsForApp(other.getCredentialsForApp());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
credentialsForApp_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> appIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* @return Whether the appId field is set.
*/
public boolean hasAppId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* @return The appId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
if (appIdBuilder_ == null) {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
} else {
return appIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
public Builder setAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (appIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appId_ = value;
} else {
appIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
public Builder setAppId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (appIdBuilder_ == null) {
appId_ = builderForValue.build();
} else {
appIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
public Builder mergeAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (appIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appId_ != null &&
appId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getAppIdBuilder().mergeFrom(value);
} else {
appId_ = value;
}
} else {
appIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
public Builder clearAppId() {
bitField0_ = (bitField0_ & ~0x00000001);
appId_ = null;
if (appIdBuilder_ != null) {
appIdBuilder_.dispose();
appIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getAppIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
if (appIdBuilder_ != null) {
return appIdBuilder_.getMessageOrBuilder();
} else {
return appId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getAppIdFieldBuilder() {
if (appIdBuilder_ == null) {
appIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getAppId(),
getParentForChildren(),
isClean());
appId_ = null;
}
return appIdBuilder_;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString credentialsForApp_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes credentialsForApp = 2;
* @return Whether the credentialsForApp field is set.
*/
@java.lang.Override
public boolean hasCredentialsForApp() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bytes credentialsForApp = 2;
* @return The credentialsForApp.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getCredentialsForApp() {
return credentialsForApp_;
}
/**
* optional bytes credentialsForApp = 2;
* @param value The credentialsForApp to set.
* @return This builder for chaining.
*/
public Builder setCredentialsForApp(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
credentialsForApp_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional bytes credentialsForApp = 2;
* @return This builder for chaining.
*/
public Builder clearCredentialsForApp() {
bitField0_ = (bitField0_ & ~0x00000002);
credentialsForApp_ = getDefaultInstance().getCredentialsForApp();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SystemCredentialsForAppsProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SystemCredentialsForAppsProto)
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public SystemCredentialsForAppsProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only view over AppCollectorDataProto, implemented by both the
// message and its Builder.
public interface AppCollectorDataProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.AppCollectorDataProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
* @return Whether the appId field is set.
*/
boolean hasAppId();
/**
* <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
* @return The appId.
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId();
/**
* <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
*/
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder();
/**
* <code>optional string app_collector_addr = 2;</code>
* @return Whether the appCollectorAddr field is set.
*/
boolean hasAppCollectorAddr();
/**
* <code>optional string app_collector_addr = 2;</code>
* @return The appCollectorAddr.
*/
java.lang.String getAppCollectorAddr();
/**
* <code>optional string app_collector_addr = 2;</code>
* @return The bytes for appCollectorAddr.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getAppCollectorAddrBytes();
/**
* <code>optional int64 rm_identifier = 3 [default = -1];</code>
* @return Whether the rmIdentifier field is set.
*/
boolean hasRmIdentifier();
/**
* <code>optional int64 rm_identifier = 3 [default = -1];</code>
* @return The rmIdentifier.
*/
long getRmIdentifier();
/**
* <code>optional int64 version = 4 [default = -1];</code>
* @return Whether the version field is set.
*/
boolean hasVersion();
/**
* <code>optional int64 version = 4 [default = -1];</code>
* @return The version.
*/
long getVersion();
/**
* <code>optional .hadoop.common.TokenProto app_collector_token = 5;</code>
* @return Whether the appCollectorToken field is set.
*/
boolean hasAppCollectorToken();
/**
* <code>optional .hadoop.common.TokenProto app_collector_token = 5;</code>
* @return The appCollectorToken.
*/
org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAppCollectorToken();
/**
* <code>optional .hadoop.common.TokenProto app_collector_token = 5;</code>
*/
org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAppCollectorTokenOrBuilder();
}
/**
*
*//////////////////////////////////////////////////////////////////////
* //// From collector_nodemanager_protocol ////////////////////////////
* //////////////////////////////////////////////////////////////////////
*
*
* Protobuf type {@code hadoop.yarn.AppCollectorDataProto}
*/
public static final class AppCollectorDataProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.AppCollectorDataProto)
AppCollectorDataProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AppCollectorDataProto.newBuilder() to construct.
// NOTE(review): the wildcard type argument on Builder was stripped by HTML
// extraction (the published copy reads "Builder>", which is invalid Java);
// restored here to the form protoc emits.
private AppCollectorDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private AppCollectorDataProto() {
appCollectorAddr_ = "";
rmIdentifier_ = -1L;
version_ = -1L;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AppCollectorDataProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_AppCollectorDataProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_AppCollectorDataProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder.class);
}
private int bitField0_;
public static final int APP_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
* @return Whether the appId field is set.
*/
@java.lang.Override
public boolean hasAppId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
* @return The appId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
public static final int APP_COLLECTOR_ADDR_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object appCollectorAddr_ = "";
/**
* optional string app_collector_addr = 2;
* @return Whether the appCollectorAddr field is set.
*/
@java.lang.Override
public boolean hasAppCollectorAddr() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string app_collector_addr = 2;
*
* The field is stored as either a String or a ByteString; the first String
* access lazily decodes the bytes and (if valid UTF-8) caches the decoded
* String back into the volatile field, so later calls are cheap.
* @return The appCollectorAddr.
*/
@java.lang.Override
public java.lang.String getAppCollectorAddr() {
java.lang.Object ref = appCollectorAddr_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form only when the bytes were valid UTF-8; invalid
// input keeps the ByteString so the raw bytes are not lost.
if (bs.isValidUtf8()) {
appCollectorAddr_ = s;
}
return s;
}
}
/**
* optional string app_collector_addr = 2;
* @return The bytes for appCollectorAddr.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAppCollectorAddrBytes() {
java.lang.Object ref = appCollectorAddr_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
appCollectorAddr_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int RM_IDENTIFIER_FIELD_NUMBER = 3;
private long rmIdentifier_ = -1L;
/**
* optional int64 rm_identifier = 3 [default = -1];
* @return Whether the rmIdentifier field is set.
*/
@java.lang.Override
public boolean hasRmIdentifier() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional int64 rm_identifier = 3 [default = -1];
* @return The rmIdentifier.
*/
@java.lang.Override
public long getRmIdentifier() {
return rmIdentifier_;
}
public static final int VERSION_FIELD_NUMBER = 4;
private long version_ = -1L;
/**
* optional int64 version = 4 [default = -1];
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional int64 version = 4 [default = -1];
* @return The version.
*/
@java.lang.Override
public long getVersion() {
return version_;
}
public static final int APP_COLLECTOR_TOKEN_FIELD_NUMBER = 5;
private org.apache.hadoop.security.proto.SecurityProtos.TokenProto appCollectorToken_;
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
* @return Whether the appCollectorToken field is set.
*/
@java.lang.Override
public boolean hasAppCollectorToken() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
* @return The appCollectorToken.
*/
@java.lang.Override
public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAppCollectorToken() {
return appCollectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : appCollectorToken_;
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
@java.lang.Override
public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAppCollectorTokenOrBuilder() {
return appCollectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : appCollectorToken_;
}
// Memoized initialization state: -1 = not yet computed, 0 = missing required
// fields, 1 = fully initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// All direct fields are optional; the only initialization constraint comes
// from the nested TokenProto, so recurse into it when present.
if (hasAppCollectorToken()) {
if (!getAppCollectorToken().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
* Serializes the set fields to {@code output} in ascending field-number order
* (1..5). Each write is guarded by the corresponding presence bit in
* bitField0_, so unset optional fields are omitted entirely from the wire.
* Unknown fields captured at parse time are re-emitted at the end.
*/
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppId());
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, appCollectorAddr_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeInt64(3, rmIdentifier_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeInt64(4, version_);
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeMessage(5, getAppCollectorToken());
}
getUnknownFields().writeTo(output);
}
/**
* Computes (and memoizes in {@code memoizedSize}) the exact encoded byte size
* of this message. Mirrors writeTo(): only fields whose presence bit is set
* contribute, plus the size of any preserved unknown fields.
*/
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, appCollectorAddr_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(3, rmIdentifier_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(4, version_);
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(5, getAppCollectorToken());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
/**
* Field-wise structural equality: two messages are equal iff each field has
* the same presence AND (when present) the same value, and their unknown
* field sets match. Non-AppCollectorDataProto arguments defer to
* super.equals().
*/
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto) obj;
if (hasAppId() != other.hasAppId()) return false;
if (hasAppId()) {
if (!getAppId()
.equals(other.getAppId())) return false;
}
if (hasAppCollectorAddr() != other.hasAppCollectorAddr()) return false;
if (hasAppCollectorAddr()) {
if (!getAppCollectorAddr()
.equals(other.getAppCollectorAddr())) return false;
}
if (hasRmIdentifier() != other.hasRmIdentifier()) return false;
if (hasRmIdentifier()) {
if (getRmIdentifier()
!= other.getRmIdentifier()) return false;
}
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (getVersion()
!= other.getVersion()) return false;
}
if (hasAppCollectorToken() != other.hasAppCollectorToken()) return false;
if (hasAppCollectorToken()) {
if (!getAppCollectorToken()
.equals(other.getAppCollectorToken())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
/**
* Hash consistent with equals(): folds in each *present* field, keyed by its
* field number, plus the descriptor and unknown fields. Memoized in
* memoizedHashCode with 0 as the "not computed" sentinel (a legitimately
* zero hash would simply be recomputed each call — harmless).
*/
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppId()) {
hash = (37 * hash) + APP_ID_FIELD_NUMBER;
hash = (53 * hash) + getAppId().hashCode();
}
if (hasAppCollectorAddr()) {
hash = (37 * hash) + APP_COLLECTOR_ADDR_FIELD_NUMBER;
hash = (53 * hash) + getAppCollectorAddr().hashCode();
}
if (hasRmIdentifier()) {
hash = (37 * hash) + RM_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getRmIdentifier());
}
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getVersion());
}
if (hasAppCollectorToken()) {
hash = (37 * hash) + APP_COLLECTOR_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getAppCollectorToken().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*//////////////////////////////////////////////////////////////////////
* //// From collector_nodemanager_protocol ////////////////////////////
* //////////////////////////////////////////////////////////////////////
*
*
* Protobuf type {@code hadoop.yarn.AppCollectorDataProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.AppCollectorDataProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_AppCollectorDataProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_AppCollectorDataProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppIdFieldBuilder();
getAppCollectorTokenFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appId_ = null;
if (appIdBuilder_ != null) {
appIdBuilder_.dispose();
appIdBuilder_ = null;
}
appCollectorAddr_ = "";
rmIdentifier_ = -1L;
version_ = -1L;
appCollectorToken_ = null;
if (appCollectorTokenBuilder_ != null) {
appCollectorTokenBuilder_.dispose();
appCollectorTokenBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_AppCollectorDataProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies every field whose presence bit is set from this Builder into
// `result`, translating builder-side bits into the message's bitField0_.
// For submessage fields (app_id, app_collector_token) the nested field
// builder's built value wins over the raw stored message when one exists.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appId_ = appIdBuilder_ == null
? appId_
: appIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.appCollectorAddr_ = appCollectorAddr_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.rmIdentifier_ = rmIdentifier_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.version_ = version_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.appCollectorToken_ = appCollectorTokenBuilder_ == null
? appCollectorToken_
: appCollectorTokenBuilder_.build();
to_bitField0_ |= 0x00000010;
}
// OR (not assign) so bits accumulated by other buildPartial helpers survive.
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
* Merges all set fields of {@code other} into this builder using standard
* proto merge semantics: scalar/string fields are overwritten, submessage
* fields are merged recursively (via mergeAppId / mergeAppCollectorToken),
* and other's unknown fields are appended. Merging the default instance is a
* no-op.
*/
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance()) return this;
if (other.hasAppId()) {
mergeAppId(other.getAppId());
}
if (other.hasAppCollectorAddr()) {
// Copy the raw String/ByteString object directly to avoid re-encoding.
appCollectorAddr_ = other.appCollectorAddr_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasRmIdentifier()) {
setRmIdentifier(other.getRmIdentifier());
}
if (other.hasVersion()) {
setVersion(other.getVersion());
}
if (other.hasAppCollectorToken()) {
mergeAppCollectorToken(other.getAppCollectorToken());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (hasAppCollectorToken()) {
if (!getAppCollectorToken().isInitialized()) {
return false;
}
}
return true;
}
/**
* Parses wire-format data from {@code input} into this builder. The switch
* dispatches on the full tag value (field_number << 3 | wire_type): 10/18/
* 24/32/42 are fields 1-5 of this message; tag 0 means end of stream; any
* other tag is preserved as an unknown field (or treated as end-of-group).
* InvalidProtocolBufferException is unwrapped to its underlying IOException
* per the builder mergeFrom contract.
*/
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
// field 1: app_id (length-delimited submessage)
input.readMessage(
getAppIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
// field 2: app_collector_addr — kept as ByteString; decoded lazily.
appCollectorAddr_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
// field 3: rm_identifier (varint)
rmIdentifier_ = input.readInt64();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32: {
// field 4: version (varint)
version_ = input.readInt64();
bitField0_ |= 0x00000008;
break;
} // case 32
case 42: {
// field 5: app_collector_token (length-delimited submessage)
input.readMessage(
getAppCollectorTokenFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000010;
break;
} // case 42
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even when parsing aborted partway through.
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> appIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
* @return Whether the appId field is set.
*/
public boolean hasAppId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
* @return The appId.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
if (appIdBuilder_ == null) {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
} else {
return appIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*/
public Builder setAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (appIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appId_ = value;
} else {
appIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*/
public Builder setAppId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (appIdBuilder_ == null) {
appId_ = builderForValue.build();
} else {
appIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*
* Proto submessage merge: if app_id is already set to a non-default value,
* {@code value} is merged field-by-field into the existing message;
* otherwise it simply replaces it. When a nested field builder exists, the
* merge is delegated to it.
*/
public Builder mergeAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (appIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appId_ != null &&
appId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getAppIdBuilder().mergeFrom(value);
} else {
appId_ = value;
}
} else {
appIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*/
public Builder clearAppId() {
bitField0_ = (bitField0_ & ~0x00000001);
appId_ = null;
if (appIdBuilder_ != null) {
appIdBuilder_.dispose();
appIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getAppIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
if (appIdBuilder_ != null) {
return appIdBuilder_.getMessageOrBuilder();
} else {
return appId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto app_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getAppIdFieldBuilder() {
if (appIdBuilder_ == null) {
appIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getAppId(),
getParentForChildren(),
isClean());
appId_ = null;
}
return appIdBuilder_;
}
private java.lang.Object appCollectorAddr_ = "";
/**
* optional string app_collector_addr = 2;
* @return Whether the appCollectorAddr field is set.
*/
public boolean hasAppCollectorAddr() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string app_collector_addr = 2;
* @return The appCollectorAddr.
*/
public java.lang.String getAppCollectorAddr() {
java.lang.Object ref = appCollectorAddr_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
appCollectorAddr_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string app_collector_addr = 2;
* @return The bytes for appCollectorAddr.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAppCollectorAddrBytes() {
java.lang.Object ref = appCollectorAddr_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
appCollectorAddr_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string app_collector_addr = 2;
* @param value The appCollectorAddr to set.
* @return This builder for chaining.
*/
public Builder setAppCollectorAddr(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
appCollectorAddr_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string app_collector_addr = 2;
* @return This builder for chaining.
*/
public Builder clearAppCollectorAddr() {
appCollectorAddr_ = getDefaultInstance().getAppCollectorAddr();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string app_collector_addr = 2;
* @param value The bytes for appCollectorAddr to set.
* @return This builder for chaining.
*/
public Builder setAppCollectorAddrBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
appCollectorAddr_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private long rmIdentifier_ = -1L;
/**
* optional int64 rm_identifier = 3 [default = -1];
* @return Whether the rmIdentifier field is set.
*/
@java.lang.Override
public boolean hasRmIdentifier() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional int64 rm_identifier = 3 [default = -1];
* @return The rmIdentifier.
*/
@java.lang.Override
public long getRmIdentifier() {
return rmIdentifier_;
}
/**
* optional int64 rm_identifier = 3 [default = -1];
* @param value The rmIdentifier to set.
* @return This builder for chaining.
*/
public Builder setRmIdentifier(long value) {
rmIdentifier_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional int64 rm_identifier = 3 [default = -1];
* @return This builder for chaining.
*/
public Builder clearRmIdentifier() {
bitField0_ = (bitField0_ & ~0x00000004);
rmIdentifier_ = -1L;
onChanged();
return this;
}
private long version_ = -1L;
/**
* optional int64 version = 4 [default = -1];
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional int64 version = 4 [default = -1];
* @return The version.
*/
@java.lang.Override
public long getVersion() {
return version_;
}
/**
* optional int64 version = 4 [default = -1];
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(long value) {
version_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional int64 version = 4 [default = -1];
* @return This builder for chaining.
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000008);
version_ = -1L;
onChanged();
return this;
}
// optional .hadoop.common.TokenProto app_collector_token = 5; presence bit 0x10.
// The field is stored directly in appCollectorToken_ until a nested builder is
// requested, after which appCollectorTokenBuilder_ becomes the single source of truth.
private org.apache.hadoop.security.proto.SecurityProtos.TokenProto appCollectorToken_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> appCollectorTokenBuilder_;
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
* @return Whether the appCollectorToken field is set.
*/
public boolean hasAppCollectorToken() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
* @return The appCollectorToken.
*/
public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAppCollectorToken() {
if (appCollectorTokenBuilder_ == null) {
return appCollectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : appCollectorToken_;
} else {
return appCollectorTokenBuilder_.getMessage();
}
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
public Builder setAppCollectorToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
if (appCollectorTokenBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appCollectorToken_ = value;
} else {
appCollectorTokenBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
public Builder setAppCollectorToken(
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
if (appCollectorTokenBuilder_ == null) {
appCollectorToken_ = builderForValue.build();
} else {
appCollectorTokenBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
public Builder mergeAppCollectorToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
if (appCollectorTokenBuilder_ == null) {
// Merge field-by-field only when a non-default token is already present;
// otherwise simply adopt the incoming value.
if (((bitField0_ & 0x00000010) != 0) &&
appCollectorToken_ != null &&
appCollectorToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
getAppCollectorTokenBuilder().mergeFrom(value);
} else {
appCollectorToken_ = value;
}
} else {
appCollectorTokenBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
public Builder clearAppCollectorToken() {
bitField0_ = (bitField0_ & ~0x00000010);
appCollectorToken_ = null;
if (appCollectorTokenBuilder_ != null) {
appCollectorTokenBuilder_.dispose();
appCollectorTokenBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getAppCollectorTokenBuilder() {
bitField0_ |= 0x00000010;
onChanged();
return getAppCollectorTokenFieldBuilder().getBuilder();
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAppCollectorTokenOrBuilder() {
if (appCollectorTokenBuilder_ != null) {
return appCollectorTokenBuilder_.getMessageOrBuilder();
} else {
return appCollectorToken_ == null ?
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : appCollectorToken_;
}
}
/**
* optional .hadoop.common.TokenProto app_collector_token = 5;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>
getAppCollectorTokenFieldBuilder() {
if (appCollectorTokenBuilder_ == null) {
appCollectorTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
getAppCollectorToken(),
getParentForChildren(),
isClean());
// Field ownership transfers to the builder once it exists.
appCollectorToken_ = null;
}
return appCollectorTokenBuilder_;
}
// Final overrides delegating unknown-field handling to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.AppCollectorDataProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.AppCollectorDataProto)
// Singleton default instance of AppCollectorDataProto, created eagerly at class load.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Parser for {@code AppCollectorDataProto}. Restored the stripped generic type
 * arguments ({@code Parser<AppCollectorDataProto>} / {@code AbstractParser<AppCollectorDataProto>});
 * without them the field is a raw type and {@link #getParserForType()} cannot type-check.
 * @deprecated use {@link #parser()} instead.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AppCollectorDataProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AppCollectorDataProto>() {
  @java.lang.Override
  public AppCollectorDataProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect the partial message.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/** Returns the shared parser for {@code AppCollectorDataProto} (typed; generic argument restored). */
public static org.apache.hadoop.thirdparty.protobuf.Parser<AppCollectorDataProto> parser() {
  return PARSER;
}
// Typed override (generic argument restored); a raw Parser return would not
// satisfy the covariant override of Message.getParserForType().
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AppCollectorDataProto> getParserForType() {
  return PARSER;
}
// Per-instance accessor for the shared default instance.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface ReportNewCollectorInfoRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ReportNewCollectorInfoRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
java.util.List
getAppCollectorsList();
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getAppCollectors(int index);
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
int getAppCollectorsCount();
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsOrBuilderList();
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getAppCollectorsOrBuilder(
int index);
}
/**
 * <pre>
 * ////////////////////////////////////////////////////
 * ///// collector_nodemanager_protocol //////////////
 * ////////////////////////////////////////////////////
 * </pre>
 *
 * Protobuf type {@code hadoop.yarn.ReportNewCollectorInfoRequestProto}
 */
public static final class ReportNewCollectorInfoRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ReportNewCollectorInfoRequestProto)
ReportNewCollectorInfoRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReportNewCollectorInfoRequestProto.newBuilder() to construct.
// Restored the stripped wildcard: "Builder> builder" is invalid Java; the
// generated constructor takes GeneratedMessageV3.Builder<?>.
private ReportNewCollectorInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; repeated field starts empty.
private ReportNewCollectorInfoRequestProto() {
  appCollectors_ = java.util.Collections.emptyList();
}
// Reflection hook used by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ReportNewCollectorInfoRequestProto();
}
// Descriptor plumbing wiring this class to the file-level descriptor tables.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto.Builder.class);
}
public static final int APP_COLLECTORS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List appCollectors_;
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
@java.lang.Override
public java.util.List getAppCollectorsList() {
return appCollectors_;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
@java.lang.Override
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsOrBuilderList() {
return appCollectors_;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
@java.lang.Override
public int getAppCollectorsCount() {
return appCollectors_.size();
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getAppCollectors(int index) {
return appCollectors_.get(index);
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getAppCollectorsOrBuilder(
int index) {
return appCollectors_.get(index);
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// Every nested AppCollectorDataProto must itself be initialized.
for (int i = 0; i < getAppCollectorsCount(); i++) {
if (!getAppCollectors(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes each repeated element with tag 1, then any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < appCollectors_.size(); i++) {
output.writeMessage(1, appCollectors_.get(i));
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the wire size; -1 in memoizedSize means "not yet computed".
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < appCollectors_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, appCollectors_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over the repeated field and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto) obj;
if (!getAppCollectorsList()
.equals(other.getAppCollectorsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals(); 0 means "not yet computed".
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAppCollectorsCount() > 0) {
hash = (37 * hash) + APP_COLLECTORS_FIELD_NUMBER;
hash = (53 * hash) + getAppCollectorsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points: ByteBuffer/ByteString/byte[] delegate to PARSER;
// stream variants go through GeneratedMessageV3's IO helpers so IOExceptions
// propagate instead of being wrapped.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() avoids a merge when called on the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * <pre>
 * ////////////////////////////////////////////////////
 * ///// collector_nodemanager_protocol //////////////
 * ////////////////////////////////////////////////////
 * </pre>
 *
 * Protobuf type {@code hadoop.yarn.ReportNewCollectorInfoRequestProto}
 */
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ReportNewCollectorInfoRequestProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (appCollectorsBuilder_ == null) {
appCollectors_ = java.util.Collections.emptyList();
} else {
appCollectors_ = null;
appCollectorsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto result) {
if (appCollectorsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
appCollectors_ = java.util.Collections.unmodifiableList(appCollectors_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.appCollectors_ = appCollectors_;
} else {
result.appCollectors_ = appCollectorsBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto.getDefaultInstance()) return this;
if (appCollectorsBuilder_ == null) {
if (!other.appCollectors_.isEmpty()) {
if (appCollectors_.isEmpty()) {
appCollectors_ = other.appCollectors_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAppCollectorsIsMutable();
appCollectors_.addAll(other.appCollectors_);
}
onChanged();
}
} else {
if (!other.appCollectors_.isEmpty()) {
if (appCollectorsBuilder_.isEmpty()) {
appCollectorsBuilder_.dispose();
appCollectorsBuilder_ = null;
appCollectors_ = other.appCollectors_;
bitField0_ = (bitField0_ & ~0x00000001);
appCollectorsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAppCollectorsFieldBuilder() : null;
} else {
appCollectorsBuilder_.addAllMessages(other.appCollectors_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getAppCollectorsCount(); i++) {
if (!getAppCollectors(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto m =
input.readMessage(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.PARSER,
extensionRegistry);
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.add(m);
} else {
appCollectorsBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List appCollectors_ =
java.util.Collections.emptyList();
private void ensureAppCollectorsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
appCollectors_ = new java.util.ArrayList(appCollectors_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder> appCollectorsBuilder_;
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public java.util.List getAppCollectorsList() {
if (appCollectorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(appCollectors_);
} else {
return appCollectorsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public int getAppCollectorsCount() {
if (appCollectorsBuilder_ == null) {
return appCollectors_.size();
} else {
return appCollectorsBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto getAppCollectors(int index) {
if (appCollectorsBuilder_ == null) {
return appCollectors_.get(index);
} else {
return appCollectorsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder setAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
if (appCollectorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppCollectorsIsMutable();
appCollectors_.set(index, value);
onChanged();
} else {
appCollectorsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder setAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.set(index, builderForValue.build());
onChanged();
} else {
appCollectorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder addAppCollectors(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
if (appCollectorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppCollectorsIsMutable();
appCollectors_.add(value);
onChanged();
} else {
appCollectorsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder addAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto value) {
if (appCollectorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAppCollectorsIsMutable();
appCollectors_.add(index, value);
onChanged();
} else {
appCollectorsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder addAppCollectors(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.add(builderForValue.build());
onChanged();
} else {
appCollectorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder addAppCollectors(
int index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder builderForValue) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.add(index, builderForValue.build());
onChanged();
} else {
appCollectorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder addAllAppCollectors(
java.lang.Iterable extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto> values) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, appCollectors_);
onChanged();
} else {
appCollectorsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder clearAppCollectors() {
if (appCollectorsBuilder_ == null) {
appCollectors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
appCollectorsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public Builder removeAppCollectors(int index) {
if (appCollectorsBuilder_ == null) {
ensureAppCollectorsIsMutable();
appCollectors_.remove(index);
onChanged();
} else {
appCollectorsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder getAppCollectorsBuilder(
int index) {
return getAppCollectorsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder getAppCollectorsOrBuilder(
int index) {
if (appCollectorsBuilder_ == null) {
return appCollectors_.get(index); } else {
return appCollectorsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public java.util.List extends org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsOrBuilderList() {
if (appCollectorsBuilder_ != null) {
return appCollectorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(appCollectors_);
}
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder addAppCollectorsBuilder() {
return getAppCollectorsFieldBuilder().addBuilder(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder addAppCollectorsBuilder(
int index) {
return getAppCollectorsFieldBuilder().addBuilder(
index, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.AppCollectorDataProto app_collectors = 1;
*/
public java.util.List
getAppCollectorsBuilderList() {
return getAppCollectorsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 that manages per-element
// builders for app_collectors. Once created, the raw list reference is
// dropped (set to null) and all access goes through the builder.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>
getAppCollectorsFieldBuilder() {
if (appCollectorsBuilder_ == null) {
appCollectorsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProto.Builder, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.AppCollectorDataProtoOrBuilder>(
appCollectors_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
appCollectors_ = null;
}
return appCollectorsBuilder_;
}
// Delegate unknown-field handling to the generated superclass builder.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ReportNewCollectorInfoRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ReportNewCollectorInfoRequestProto)
// Singleton default instance, created eagerly at class load.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Fix: the Parser/AbstractParser type arguments were lost during extraction;
// restored to the standard protoc-generated form. Deprecated in favor of
// the static parser() accessor.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReportNewCollectorInfoRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReportNewCollectorInfoRequestProto>() {
@java.lang.Override
public ReportNewCollectorInfoRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
// Attach whatever was parsed so far so callers can inspect it.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Fix: the Parser return-type arguments were lost during extraction;
// restored to the standard protoc-generated signatures.
public static org.apache.hadoop.thirdparty.protobuf.Parser<ReportNewCollectorInfoRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ReportNewCollectorInfoRequestProto> getParserForType() {
return PARSER;
}
// Per-instance accessor for the shared default instance.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Marker OrBuilder interface for the (field-less) response message.
public interface ReportNewCollectorInfoResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ReportNewCollectorInfoResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.yarn.ReportNewCollectorInfoResponseProto}
*/
public static final class ReportNewCollectorInfoResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ReportNewCollectorInfoResponseProto)
ReportNewCollectorInfoResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReportNewCollectorInfoResponseProto.newBuilder() to construct.
private ReportNewCollectorInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private ReportNewCollectorInfoResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ReportNewCollectorInfoResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ReportNewCollectorInfoResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ReportNewCollectorInfoResponseProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_ReportNewCollectorInfoResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ReportNewCollectorInfoResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ReportNewCollectorInfoResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public ReportNewCollectorInfoResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// OrBuilder read-access contract for GetTimelineCollectorContextRequestProto
// (single optional message field: appId).
public interface GetTimelineCollectorContextRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetTimelineCollectorContextRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
 * optional .hadoop.yarn.ApplicationIdProto appId = 1;
 * @return Whether the appId field is set.
 */
boolean hasAppId();
/**
 * optional .hadoop.yarn.ApplicationIdProto appId = 1;
 * @return The appId.
 */
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId();
/**
 * optional .hadoop.yarn.ApplicationIdProto appId = 1;
 */
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.yarn.GetTimelineCollectorContextRequestProto}
*/
public static final class GetTimelineCollectorContextRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetTimelineCollectorContextRequestProto)
GetTimelineCollectorContextRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTimelineCollectorContextRequestProto.newBuilder() to construct.
// Fix: the wildcard type argument on the Builder parameter was lost during
// extraction; restored to the standard protoc-generated form.
private GetTimelineCollectorContextRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used by newInstance for reflective instantiation.
private GetTimelineCollectorContextRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTimelineCollectorContextRequestProto();
}
// Descriptor / field-accessor plumbing generated by protoc.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto.Builder.class);
}
// Presence bits: bit 0 tracks whether appId is set.
private int bitField0_;
public static final int APPID_FIELD_NUMBER = 1;
// Backing storage for appId; null until set, default instance is returned.
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
/**
 * optional .hadoop.yarn.ApplicationIdProto appId = 1;
 * @return Whether the appId field is set.
 */
@java.lang.Override
public boolean hasAppId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.ApplicationIdProto appId = 1;
 * @return The appId.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
/**
 * optional .hadoop.yarn.ApplicationIdProto appId = 1;
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
switch (memoizedIsInitialized) {
case 1:
return true;
case 0:
return false;
default:
// First query: no required fields exist, so cache and report true.
memoizedIsInitialized = 1;
return true;
}
}
// Serializes appId (if present) followed by any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAppId());
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the wire size of this message.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getAppId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: same appId presence/value and same unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto) obj;
if (hasAppId() != other.hasAppId()) return false;
if (hasAppId()) {
if (!getAppId()
.equals(other.getAppId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash is memoized; folds in descriptor, appId (when set) and unknowns,
// consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppId()) {
hash = (37 * hash) + APPID_FIELD_NUMBER;
hash = (53 * hash) + getAppId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protoc-generated parse entry points: one overload per input kind
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to PARSER.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories: fresh builder, builder seeded from a prototype, and
// builder seeded from this instance (toBuilder).
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetTimelineCollectorContextRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetTimelineCollectorContextRequestProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProtoOrBuilder {
// Descriptor / field-accessor plumbing for the Builder.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the appId sub-builder when the runtime requests field
// builders up front (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAppIdFieldBuilder();
}
}
// Resets the builder: clears presence bits, drops appId and disposes of
// its sub-builder if one was created.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
appId_ = null;
if (appIdBuilder_ != null) {
appIdBuilder_.dispose();
appIdBuilder_ = null;
}
return this;
}
// Descriptor and default-instance accessors for the Builder.
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto.getDefaultInstance();
}
// build() enforces the initialization contract; buildPartial() copies the
// builder state into a new message without that check.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies the appId field (bit 0x1) from builder state into the message,
// preferring the sub-builder's built value when one exists.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.appId_ = appIdBuilder_ == null
? appId_
: appIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
// Standard generated overrides that delegate reflective field access to the
// GeneratedMessageV3.Builder superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible, otherwise falls back
// to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges only fields that are set on the other message; no-op for the
// default instance.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto.getDefaultInstance()) return this;
if (other.hasAppId()) {
mergeAppId(other.getAppId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// This message has no required fields, so it is always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire: tag 10 = appId (field 1, length-delimited);
// unknown fields are preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getAppIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder state: presence bitmask, the appId message, and its lazily
// created single-field sub-builder.
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> appIdBuilder_;
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* @return Whether the appId field is set.
*/
public boolean hasAppId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* @return The appId, or the default instance if unset.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
if (appIdBuilder_ == null) {
return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
} else {
return appIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* Sets the field from a built message; rejects null.
*/
public Builder setAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (appIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
appId_ = value;
} else {
appIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* Sets the field from a builder (built immediately).
*/
public Builder setAppId(
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
if (appIdBuilder_ == null) {
appId_ = builderForValue.build();
} else {
appIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* Merges into an existing non-default value, otherwise replaces it.
*/
public Builder mergeAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
if (appIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
appId_ != null &&
appId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
getAppIdBuilder().mergeFrom(value);
} else {
appId_ = value;
}
} else {
appIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* Clears the presence bit and disposes any sub-builder.
*/
public Builder clearAppId() {
bitField0_ = (bitField0_ & ~0x00000001);
appId_ = null;
if (appIdBuilder_ != null) {
appIdBuilder_.dispose();
appIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* Returns a mutable sub-builder and marks the field as set.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getAppIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAppIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* Read-only view without forcing sub-builder creation.
*/
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
if (appIdBuilder_ != null) {
return appIdBuilder_.getMessageOrBuilder();
} else {
return appId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
}
}
/**
* optional .hadoop.yarn.ApplicationIdProto appId = 1;
* Lazily creates the SingleFieldBuilderV3; after creation, appId_ is
* owned by the builder and the field reference is nulled out.
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
getAppIdFieldBuilder() {
if (appIdBuilder_ == null) {
appIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
getAppId(),
getParentForChildren(),
isClean());
appId_ = null;
}
return appIdBuilder_;
}
// Unknown-field handling delegates to the superclass (fields preserved
// round-trip through serialization).
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetTimelineCollectorContextRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetTimelineCollectorContextRequestProto)
// Shared immutable default instance, created once at class load.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated parser singleton; callers should use parser() instead.
// NOTE(review): the <GetTimelineCollectorContextRequestProto> type arguments
// below were missing (raw Parser/AbstractParser) — evidently stripped during
// text extraction; restored to match standard protoc-generated output.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTimelineCollectorContextRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTimelineCollectorContextRequestProto>() {
@java.lang.Override
public GetTimelineCollectorContextRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
// Attach whatever was parsed so far to the exception.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Restored the <GetTimelineCollectorContextRequestProto> type argument on the
// Parser return types (was raw Parser — lost in extraction), matching the
// standard protoc-generated form.
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTimelineCollectorContextRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTimelineCollectorContextRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor interface implemented by both the message and its
// builder; one has/get pair per optional field.
public interface GetTimelineCollectorContextResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetTimelineCollectorContextResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string user_id = 1;
* @return Whether the userId field is set.
*/
boolean hasUserId();
/**
* optional string user_id = 1;
* @return The userId.
*/
java.lang.String getUserId();
/**
* optional string user_id = 1;
* @return The bytes for userId.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getUserIdBytes();
/**
* optional string flow_name = 2;
* @return Whether the flowName field is set.
*/
boolean hasFlowName();
/**
* optional string flow_name = 2;
* @return The flowName.
*/
java.lang.String getFlowName();
/**
* optional string flow_name = 2;
* @return The bytes for flowName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getFlowNameBytes();
/**
* optional string flow_version = 3;
* @return Whether the flowVersion field is set.
*/
boolean hasFlowVersion();
/**
* optional string flow_version = 3;
* @return The flowVersion.
*/
java.lang.String getFlowVersion();
/**
* optional string flow_version = 3;
* @return The bytes for flowVersion.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getFlowVersionBytes();
/**
* optional int64 flow_run_id = 4;
* @return Whether the flowRunId field is set.
*/
boolean hasFlowRunId();
/**
* optional int64 flow_run_id = 4;
* @return The flowRunId.
*/
long getFlowRunId();
}
/**
* Protobuf type {@code hadoop.yarn.GetTimelineCollectorContextResponseProto}
*/
public static final class GetTimelineCollectorContextResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetTimelineCollectorContextResponseProto)
GetTimelineCollectorContextResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTimelineCollectorContextResponseProto.newBuilder() to construct.
// NOTE(review): the constructor parameter type read "GeneratedMessageV3.Builder>"
// — the "<?" was evidently stripped during extraction; restored to the
// standard protoc-generated "Builder<?>".
private GetTimelineCollectorContextResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Initializes string fields to empty defaults.
private GetTimelineCollectorContextResponseProto() {
userId_ = "";
flowName_ = "";
flowVersion_ = "";
}
// Used by the runtime to create instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTimelineCollectorContextResponseProto();
}
// Descriptor plumbing wired to the tables generated at the bottom of the file.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto.Builder.class);
}
// Presence bitmask for the four optional fields.
private int bitField0_;
public static final int USER_ID_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted on first access.
@SuppressWarnings("serial")
private volatile java.lang.Object userId_ = "";
/**
* optional string user_id = 1;
* @return Whether the userId field is set.
*/
@java.lang.Override
public boolean hasUserId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string user_id = 1;
* @return The userId. Caches the decoded String only if valid UTF-8.
*/
@java.lang.Override
public java.lang.String getUserId() {
java.lang.Object ref = userId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
userId_ = s;
}
return s;
}
}
/**
* optional string user_id = 1;
* @return The bytes for userId; caches the ByteString form.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getUserIdBytes() {
java.lang.Object ref = userId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
userId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int FLOW_NAME_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; lazily converted on first access.
@SuppressWarnings("serial")
private volatile java.lang.Object flowName_ = "";
/**
* optional string flow_name = 2;
* @return Whether the flowName field is set.
*/
@java.lang.Override
public boolean hasFlowName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string flow_name = 2;
* @return The flowName. Caches the decoded String only if valid UTF-8.
*/
@java.lang.Override
public java.lang.String getFlowName() {
java.lang.Object ref = flowName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
flowName_ = s;
}
return s;
}
}
/**
* optional string flow_name = 2;
* @return The bytes for flowName; caches the ByteString form.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getFlowNameBytes() {
java.lang.Object ref = flowName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
flowName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int FLOW_VERSION_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; lazily converted on first access.
@SuppressWarnings("serial")
private volatile java.lang.Object flowVersion_ = "";
/**
* optional string flow_version = 3;
* @return Whether the flowVersion field is set.
*/
@java.lang.Override
public boolean hasFlowVersion() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string flow_version = 3;
* @return The flowVersion. Caches the decoded String only if valid UTF-8.
*/
@java.lang.Override
public java.lang.String getFlowVersion() {
java.lang.Object ref = flowVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
flowVersion_ = s;
}
return s;
}
}
/**
* optional string flow_version = 3;
* @return The bytes for flowVersion; caches the ByteString form.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getFlowVersionBytes() {
java.lang.Object ref = flowVersion_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
flowVersion_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int FLOW_RUN_ID_FIELD_NUMBER = 4;
private long flowRunId_ = 0L;
/**
* optional int64 flow_run_id = 4;
* @return Whether the flowRunId field is set.
*/
@java.lang.Override
public boolean hasFlowRunId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional int64 flow_run_id = 4;
* @return The flowRunId (0 if unset).
*/
@java.lang.Override
public long getFlowRunId() {
return flowRunId_;
}
// Memoized initialization check (-1 unknown, 0 false, 1 true); always true
// here since the message has no required fields.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, userId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, flowName_);
}
if (((bitField0_ & 0x00000004) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, flowVersion_);
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeInt64(4, flowRunId_);
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the wire size; must mirror writeTo() exactly.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, userId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, flowName_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, flowVersion_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(4, flowRunId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field equality: presence must match, and values compare only
// when present; unknown fields participate too.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto) obj;
if (hasUserId() != other.hasUserId()) return false;
if (hasUserId()) {
if (!getUserId()
.equals(other.getUserId())) return false;
}
if (hasFlowName() != other.hasFlowName()) return false;
if (hasFlowName()) {
if (!getFlowName()
.equals(other.getFlowName())) return false;
}
if (hasFlowVersion() != other.hasFlowVersion()) return false;
if (hasFlowVersion()) {
if (!getFlowVersion()
.equals(other.getFlowVersion())) return false;
}
if (hasFlowRunId() != other.hasFlowRunId()) return false;
if (hasFlowRunId()) {
if (getFlowRunId()
!= other.getFlowRunId()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash mixing each present field's number and value hash, kept
// consistent with equals().
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUserId()) {
hash = (37 * hash) + USER_ID_FIELD_NUMBER;
hash = (53 * hash) + getUserId().hashCode();
}
if (hasFlowName()) {
hash = (37 * hash) + FLOW_NAME_FIELD_NUMBER;
hash = (53 * hash) + getFlowName().hashCode();
}
if (hasFlowVersion()) {
hash = (37 * hash) + FLOW_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getFlowVersion().hashCode();
}
if (hasFlowRunId()) {
hash = (37 * hash) + FLOW_RUN_ID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getFlowRunId());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite; parseDelimitedFrom reads a
// varint-length-prefixed message.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories: toBuilder() on the default instance avoids an
// unnecessary merge.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetTimelineCollectorContextResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetTimelineCollectorContextResponseProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProtoOrBuilder {
// Descriptor plumbing for the builder; mirrors the message class.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto.newBuilder()
// No message sub-fields here, so no builder initialization is needed.
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets presence bits and restores all field defaults.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
userId_ = "";
flowName_ = "";
flowVersion_ = "";
flowRunId_ = 0L;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_GetTimelineCollectorContextResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto.getDefaultInstance();
}
// Builds the message; the initialization check never fails here since the
// message has no required fields.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check; copies set fields via buildPartial0.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies each set field (by presence bit) from builder state to the message
// and ORs the accumulated bits into the message's bitField0_.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.userId_ = userId_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.flowName_ = flowName_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.flowVersion_ = flowVersion_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.flowRunId_ = flowRunId_;
to_bitField0_ |= 0x00000008;
}
result.bitField0_ |= to_bitField0_;
}
// Standard generated overrides delegating reflective field access to the
// GeneratedMessageV3.Builder superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible, otherwise falls back
// to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only fields present on the other message; string fields share the
// other message's backing Object (String or ByteString) directly.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto.getDefaultInstance()) return this;
if (other.hasUserId()) {
userId_ = other.userId_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasFlowName()) {
flowName_ = other.flowName_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasFlowVersion()) {
flowVersion_ = other.flowVersion_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasFlowRunId()) {
setFlowRunId(other.getFlowRunId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// No required fields, so the builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire parsing: tags 10/18/26 are the three string fields (read lazily as
// bytes), tag 32 is the int64 flow_run_id; unknown fields are preserved.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
userId_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
flowName_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
flowVersion_ = input.readBytes();
bitField0_ |= 0x00000004;
break;
} // case 26
case 32: {
flowRunId_ = input.readInt64();
bitField0_ |= 0x00000008;
break;
} // case 32
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder state: presence bitmask and the userId field, which holds either
// a String or a ByteString (lazily decoded).
private int bitField0_;
private java.lang.Object userId_ = "";
/**
* optional string user_id = 1;
* @return Whether the userId field is set.
*/
public boolean hasUserId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string user_id = 1;
* @return The userId. Caches the decoded String only if valid UTF-8.
*/
public java.lang.String getUserId() {
java.lang.Object ref = userId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
userId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string user_id = 1;
* @return The bytes for userId; caches the ByteString form.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getUserIdBytes() {
java.lang.Object ref = userId_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
userId_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string user_id = 1;
* @param value The userId to set; must not be null.
* @return This builder for chaining.
*/
public Builder setUserId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
userId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string user_id = 1;
* Restores the default ("") and clears the presence bit.
* @return This builder for chaining.
*/
public Builder clearUserId() {
userId_ = getDefaultInstance().getUserId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string user_id = 1;
* @param value The bytes for userId to set; must not be null.
* @return This builder for chaining.
*/
public Builder setUserIdBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
userId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object flowName_ = "";
/**
* optional string flow_name = 2;
* @return Whether the flowName field is set.
*/
public boolean hasFlowName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string flow_name = 2;
* @return The flowName.
*/
public java.lang.String getFlowName() {
java.lang.Object ref = flowName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
flowName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string flow_name = 2;
* @return The bytes for flowName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getFlowNameBytes() {
java.lang.Object ref = flowName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
flowName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string flow_name = 2;
* @param value The flowName to set.
* @return This builder for chaining.
*/
public Builder setFlowName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
flowName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string flow_name = 2;
* @return This builder for chaining.
*/
public Builder clearFlowName() {
flowName_ = getDefaultInstance().getFlowName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string flow_name = 2;
* @param value The bytes for flowName to set.
* @return This builder for chaining.
*/
public Builder setFlowNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
flowName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object flowVersion_ = "";
/**
* optional string flow_version = 3;
* @return Whether the flowVersion field is set.
*/
public boolean hasFlowVersion() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional string flow_version = 3;
* @return The flowVersion.
*/
public java.lang.String getFlowVersion() {
java.lang.Object ref = flowVersion_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
flowVersion_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string flow_version = 3;
* @return The bytes for flowVersion.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getFlowVersionBytes() {
java.lang.Object ref = flowVersion_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
flowVersion_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string flow_version = 3;
* @param value The flowVersion to set.
* @return This builder for chaining.
*/
public Builder setFlowVersion(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
flowVersion_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional string flow_version = 3;
* @return This builder for chaining.
*/
public Builder clearFlowVersion() {
flowVersion_ = getDefaultInstance().getFlowVersion();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* optional string flow_version = 3;
* @param value The bytes for flowVersion to set.
* @return This builder for chaining.
*/
public Builder setFlowVersionBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
flowVersion_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
// Scalar field 4; presence is tracked by bit 3 of bitField0_.
private long flowRunId_ ;
/**
* optional int64 flow_run_id = 4;
* @return Whether the flowRunId field is set.
*/
@java.lang.Override
public boolean hasFlowRunId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional int64 flow_run_id = 4;
* @return The flowRunId.
*/
@java.lang.Override
public long getFlowRunId() {
return flowRunId_;
}
/**
* optional int64 flow_run_id = 4;
* @param value The flowRunId to set.
* @return This builder for chaining.
*/
public Builder setFlowRunId(long value) {
flowRunId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* optional int64 flow_run_id = 4;
* @return This builder for chaining.
*/
public Builder clearFlowRunId() {
// Clear the presence bit and restore the proto default (0).
bitField0_ = (bitField0_ & ~0x00000008);
flowRunId_ = 0L;
onChanged();
return this;
}
// Unknown-field handling is delegated unchanged to the generated base builder.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetTimelineCollectorContextResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetTimelineCollectorContextResponseProto)
// Shared immutable default instance, created eagerly at class-load time.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// NOTE(review): the generic type arguments below were lost when this generated
// file passed through an HTML viewer (protoc always emits them); restored here
// so the field is not a raw type and matches getParserForType()'s return.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTimelineCollectorContextResponseProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTimelineCollectorContextResponseProto>() {
  @java.lang.Override
  public GetTimelineCollectorContextResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect the partial message.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      // Wrap plain I/O failures in the protobuf exception type the contract requires.
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/**
 * Returns the shared parser for this message type.
 * NOTE(review): generic type argument restored (stripped by HTML scraping).
 */
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTimelineCollectorContextResponseProto> parser() {
  return PARSER;
}
// NOTE(review): generic type argument restored (stripped by HTML scraping);
// protoc emits the covariant Parser<GetTimelineCollectorContextResponseProto> here.
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTimelineCollectorContextResponseProto> getParserForType() {
  return PARSER;
}
// Instance-level accessor for the shared default instance (Message contract).
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// NOTE(review): the List type argument on getAllocationTagsList() was stripped
// by HTML scraping; restored to java.util.List<java.lang.String> as protoc emits.
public interface NMContainerStatusProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.NMContainerStatusProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   * @return Whether the containerId field is set.
   */
  boolean hasContainerId();
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   * @return The containerId.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();
  /**
   * optional .hadoop.yarn.ContainerStateProto container_state = 2;
   * @return Whether the containerState field is set.
   */
  boolean hasContainerState();
  /**
   * optional .hadoop.yarn.ContainerStateProto container_state = 2;
   * @return The containerState.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState();
  /**
   * optional .hadoop.yarn.ResourceProto resource = 3;
   * @return Whether the resource field is set.
   */
  boolean hasResource();
  /**
   * optional .hadoop.yarn.ResourceProto resource = 3;
   * @return The resource.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
  /**
   * optional .hadoop.yarn.ResourceProto resource = 3;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();
  /**
   * optional .hadoop.yarn.PriorityProto priority = 4;
   * @return Whether the priority field is set.
   */
  boolean hasPriority();
  /**
   * optional .hadoop.yarn.PriorityProto priority = 4;
   * @return The priority.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
  /**
   * optional .hadoop.yarn.PriorityProto priority = 4;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();
  /**
   * optional string diagnostics = 5 [default = "N/A"];
   * @return Whether the diagnostics field is set.
   */
  boolean hasDiagnostics();
  /**
   * optional string diagnostics = 5 [default = "N/A"];
   * @return The diagnostics.
   */
  java.lang.String getDiagnostics();
  /**
   * optional string diagnostics = 5 [default = "N/A"];
   * @return The bytes for diagnostics.
   */
  org.apache.hadoop.thirdparty.protobuf.ByteString
      getDiagnosticsBytes();
  /**
   * optional int32 container_exit_status = 6;
   * @return Whether the containerExitStatus field is set.
   */
  boolean hasContainerExitStatus();
  /**
   * optional int32 container_exit_status = 6;
   * @return The containerExitStatus.
   */
  int getContainerExitStatus();
  /**
   * optional int64 creation_time = 7;
   * @return Whether the creationTime field is set.
   */
  boolean hasCreationTime();
  /**
   * optional int64 creation_time = 7;
   * @return The creationTime.
   */
  long getCreationTime();
  /**
   * optional string nodeLabelExpression = 8;
   * @return Whether the nodeLabelExpression field is set.
   */
  boolean hasNodeLabelExpression();
  /**
   * optional string nodeLabelExpression = 8;
   * @return The nodeLabelExpression.
   */
  java.lang.String getNodeLabelExpression();
  /**
   * optional string nodeLabelExpression = 8;
   * @return The bytes for nodeLabelExpression.
   */
  org.apache.hadoop.thirdparty.protobuf.ByteString
      getNodeLabelExpressionBytes();
  /**
   * optional int32 version = 9;
   * @return Whether the version field is set.
   */
  boolean hasVersion();
  /**
   * optional int32 version = 9;
   * @return The version.
   */
  int getVersion();
  /**
   * optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
   * @return Whether the executionType field is set.
   */
  boolean hasExecutionType();
  /**
   * optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
   * @return The executionType.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();
  /**
   * optional int64 allocation_request_id = 11 [default = -1];
   * @return Whether the allocationRequestId field is set.
   */
  boolean hasAllocationRequestId();
  /**
   * optional int64 allocation_request_id = 11 [default = -1];
   * @return The allocationRequestId.
   */
  long getAllocationRequestId();
  /**
   * repeated string allocation_tags = 12;
   * @return A list containing the allocationTags.
   */
  java.util.List<java.lang.String>
      getAllocationTagsList();
  /**
   * repeated string allocation_tags = 12;
   * @return The count of allocationTags.
   */
  int getAllocationTagsCount();
  /**
   * repeated string allocation_tags = 12;
   * @param index The index of the element to return.
   * @return The allocationTags at the given index.
   */
  java.lang.String getAllocationTags(int index);
  /**
   * repeated string allocation_tags = 12;
   * @param index The index of the value to return.
   * @return The bytes of the allocationTags at the given index.
   */
  org.apache.hadoop.thirdparty.protobuf.ByteString
      getAllocationTagsBytes(int index);
}
/**
* Protobuf type {@code hadoop.yarn.NMContainerStatusProto}
*/
public static final class NMContainerStatusProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NMContainerStatusProto)
NMContainerStatusProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NMContainerStatusProto.newBuilder() to construct.
// NOTE(review): "Builder>" in the scraped source is not valid Java — the
// wildcard type argument was eaten by HTML extraction; restored to the
// Builder<?> parameter protoc generates.
private NMContainerStatusProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; initializes fields whose
// proto defaults are not the Java zero value.
private NMContainerStatusProto() {
containerState_ = 1;
diagnostics_ = "N/A"; // [default = "N/A"] per the field declaration
nodeLabelExpression_ = "";
executionType_ = 1; // [default = GUARANTEED] per the field declaration
allocationRequestId_ = -1L; // [default = -1] per the field declaration
allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
}
// Factory hook invoked by the protobuf runtime; the parameter only
// disambiguates this overload and is never used.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NMContainerStatusProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NMContainerStatusProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NMContainerStatusProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder.class);
}
private int bitField0_;
public static final int CONTAINER_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
/**
* optional .hadoop.yarn.ContainerIdProto container_id = 1;
* @return Whether the containerId field is set.
*/
@java.lang.Override
public boolean hasContainerId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.yarn.ContainerIdProto container_id = 1;
* @return The containerId.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}
/**
* optional .hadoop.yarn.ContainerIdProto container_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}
public static final int CONTAINER_STATE_FIELD_NUMBER = 2;
private int containerState_ = 1;
/**
* optional .hadoop.yarn.ContainerStateProto container_state = 2;
* @return Whether the containerState field is set.
*/
@java.lang.Override public boolean hasContainerState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.yarn.ContainerStateProto container_state = 2;
* @return The containerState.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(containerState_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
}
public static final int RESOURCE_FIELD_NUMBER = 3;
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
/**
* optional .hadoop.yarn.ResourceProto resource = 3;
* @return Whether the resource field is set.
*/
@java.lang.Override
public boolean hasResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .hadoop.yarn.ResourceProto resource = 3;
* @return The resource.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
/**
* optional .hadoop.yarn.ResourceProto resource = 3;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
public static final int PRIORITY_FIELD_NUMBER = 4;
private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
/**
* optional .hadoop.yarn.PriorityProto priority = 4;
* @return Whether the priority field is set.
*/
@java.lang.Override
public boolean hasPriority() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional .hadoop.yarn.PriorityProto priority = 4;
* @return The priority.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
}
/**
* optional .hadoop.yarn.PriorityProto priority = 4;
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
}
public static final int DIAGNOSTICS_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object diagnostics_ = "N/A";
/**
* optional string diagnostics = 5 [default = "N/A"];
* @return Whether the diagnostics field is set.
*/
@java.lang.Override
public boolean hasDiagnostics() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional string diagnostics = 5 [default = "N/A"];
* @return The diagnostics.
*/
@java.lang.Override
public java.lang.String getDiagnostics() {
java.lang.Object ref = diagnostics_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
diagnostics_ = s;
}
return s;
}
}
/**
* optional string diagnostics = 5 [default = "N/A"];
* @return The bytes for diagnostics.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes() {
java.lang.Object ref = diagnostics_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
diagnostics_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int CONTAINER_EXIT_STATUS_FIELD_NUMBER = 6;
private int containerExitStatus_ = 0;
/**
* optional int32 container_exit_status = 6;
* @return Whether the containerExitStatus field is set.
*/
@java.lang.Override
public boolean hasContainerExitStatus() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional int32 container_exit_status = 6;
* @return The containerExitStatus.
*/
@java.lang.Override
public int getContainerExitStatus() {
return containerExitStatus_;
}
public static final int CREATION_TIME_FIELD_NUMBER = 7;
private long creationTime_ = 0L;
/**
* optional int64 creation_time = 7;
* @return Whether the creationTime field is set.
*/
@java.lang.Override
public boolean hasCreationTime() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional int64 creation_time = 7;
* @return The creationTime.
*/
@java.lang.Override
public long getCreationTime() {
return creationTime_;
}
public static final int NODELABELEXPRESSION_FIELD_NUMBER = 8;
@SuppressWarnings("serial")
private volatile java.lang.Object nodeLabelExpression_ = "";
/**
* optional string nodeLabelExpression = 8;
* @return Whether the nodeLabelExpression field is set.
*/
@java.lang.Override
public boolean hasNodeLabelExpression() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional string nodeLabelExpression = 8;
* @return The nodeLabelExpression.
*/
@java.lang.Override
public java.lang.String getNodeLabelExpression() {
java.lang.Object ref = nodeLabelExpression_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
nodeLabelExpression_ = s;
}
return s;
}
}
/**
* optional string nodeLabelExpression = 8;
* @return The bytes for nodeLabelExpression.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelExpressionBytes() {
java.lang.Object ref = nodeLabelExpression_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nodeLabelExpression_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int VERSION_FIELD_NUMBER = 9;
private int version_ = 0;
/**
* optional int32 version = 9;
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* optional int32 version = 9;
* @return The version.
*/
@java.lang.Override
public int getVersion() {
return version_;
}
public static final int EXECUTIONTYPE_FIELD_NUMBER = 10;
private int executionType_ = 1;
/**
* optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
* @return Whether the executionType field is set.
*/
@java.lang.Override public boolean hasExecutionType() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
* @return The executionType.
*/
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
}
public static final int ALLOCATION_REQUEST_ID_FIELD_NUMBER = 11;
private long allocationRequestId_ = -1L;
/**
* optional int64 allocation_request_id = 11 [default = -1];
* @return Whether the allocationRequestId field is set.
*/
@java.lang.Override
public boolean hasAllocationRequestId() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* optional int64 allocation_request_id = 11 [default = -1];
* @return The allocationRequestId.
*/
@java.lang.Override
public long getAllocationRequestId() {
return allocationRequestId_;
}
public static final int ALLOCATION_TAGS_FIELD_NUMBER = 12;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
/**
* repeated string allocation_tags = 12;
* @return A list containing the allocationTags.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getAllocationTagsList() {
return allocationTags_;
}
/**
* repeated string allocation_tags = 12;
* @return The count of allocationTags.
*/
public int getAllocationTagsCount() {
return allocationTags_.size();
}
/**
* repeated string allocation_tags = 12;
* @param index The index of the element to return.
* @return The allocationTags at the given index.
*/
public java.lang.String getAllocationTags(int index) {
return allocationTags_.get(index);
}
/**
* repeated string allocation_tags = 12;
* @param index The index of the value to return.
* @return The bytes of the allocationTags at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAllocationTagsBytes(int index) {
return allocationTags_.getByteString(index);
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// Only the nested resource message has initialization constraints to check;
// all fields of this message itself are optional.
if (hasResource()) {
if (!getResource().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields (checked via bitField0_ presence bits) in field-number
// order, then appends any preserved unknown fields.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getContainerId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, containerState_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getResource());
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(4, getPriority());
}
if (((bitField0_ & 0x00000010) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, diagnostics_);
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeInt32(6, containerExitStatus_);
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeInt64(7, creationTime_);
}
if (((bitField0_ & 0x00000080) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, nodeLabelExpression_);
}
if (((bitField0_ & 0x00000100) != 0)) {
output.writeInt32(9, version_);
}
if (((bitField0_ & 0x00000200) != 0)) {
output.writeEnum(10, executionType_);
}
if (((bitField0_ & 0x00000400) != 0)) {
output.writeInt64(11, allocationRequestId_);
}
// Repeated field: no presence bit; every element is written with tag 12.
for (int i = 0; i < allocationTags_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 12, allocationTags_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getContainerId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, containerState_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(3, getResource());
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, getPriority());
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, diagnostics_);
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(6, containerExitStatus_);
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(7, creationTime_);
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, nodeLabelExpression_);
}
if (((bitField0_ & 0x00000100) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(9, version_);
}
if (((bitField0_ & 0x00000200) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(10, executionType_);
}
if (((bitField0_ & 0x00000400) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt64Size(11, allocationRequestId_);
}
{
int dataSize = 0;
for (int i = 0; i < allocationTags_.size(); i++) {
dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i));
}
size += dataSize;
size += 1 * getAllocationTagsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto) obj;
if (hasContainerId() != other.hasContainerId()) return false;
if (hasContainerId()) {
if (!getContainerId()
.equals(other.getContainerId())) return false;
}
if (hasContainerState() != other.hasContainerState()) return false;
if (hasContainerState()) {
if (containerState_ != other.containerState_) return false;
}
if (hasResource() != other.hasResource()) return false;
if (hasResource()) {
if (!getResource()
.equals(other.getResource())) return false;
}
if (hasPriority() != other.hasPriority()) return false;
if (hasPriority()) {
if (!getPriority()
.equals(other.getPriority())) return false;
}
if (hasDiagnostics() != other.hasDiagnostics()) return false;
if (hasDiagnostics()) {
if (!getDiagnostics()
.equals(other.getDiagnostics())) return false;
}
if (hasContainerExitStatus() != other.hasContainerExitStatus()) return false;
if (hasContainerExitStatus()) {
if (getContainerExitStatus()
!= other.getContainerExitStatus()) return false;
}
if (hasCreationTime() != other.hasCreationTime()) return false;
if (hasCreationTime()) {
if (getCreationTime()
!= other.getCreationTime()) return false;
}
if (hasNodeLabelExpression() != other.hasNodeLabelExpression()) return false;
if (hasNodeLabelExpression()) {
if (!getNodeLabelExpression()
.equals(other.getNodeLabelExpression())) return false;
}
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (getVersion()
!= other.getVersion()) return false;
}
if (hasExecutionType() != other.hasExecutionType()) return false;
if (hasExecutionType()) {
if (executionType_ != other.executionType_) return false;
}
if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false;
if (hasAllocationRequestId()) {
if (getAllocationRequestId()
!= other.getAllocationRequestId()) return false;
}
if (!getAllocationTagsList()
.equals(other.getAllocationTagsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Generated-code hashCode: mixes the descriptor hash plus every *present*
// optional field (and the non-empty repeated allocation_tags list) using the
// standard protobuf (37 * hash) + FIELD_NUMBER / (53 * hash) + value scheme,
// then memoizes the result. NOTE: a computed hash of exactly 0 is
// indistinguishable from "not yet computed" and is recomputed on each call.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasContainerId()) {
hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
hash = (53 * hash) + getContainerId().hashCode();
}
if (hasContainerState()) {
hash = (37 * hash) + CONTAINER_STATE_FIELD_NUMBER;
// Enum fields hash by their raw wire number, not the enum object.
hash = (53 * hash) + containerState_;
}
if (hasResource()) {
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
}
if (hasPriority()) {
hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
hash = (53 * hash) + getPriority().hashCode();
}
if (hasDiagnostics()) {
hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
hash = (53 * hash) + getDiagnostics().hashCode();
}
if (hasContainerExitStatus()) {
hash = (37 * hash) + CONTAINER_EXIT_STATUS_FIELD_NUMBER;
hash = (53 * hash) + getContainerExitStatus();
}
if (hasCreationTime()) {
hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER;
// int64 fields fold both 32-bit halves via Internal.hashLong.
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getCreationTime());
}
if (hasNodeLabelExpression()) {
hash = (37 * hash) + NODELABELEXPRESSION_FIELD_NUMBER;
hash = (53 * hash) + getNodeLabelExpression().hashCode();
}
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVersion();
}
if (hasExecutionType()) {
hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER;
hash = (53 * hash) + executionType_;
}
if (hasAllocationRequestId()) {
hash = (37 * hash) + ALLOCATION_REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
getAllocationRequestId());
}
if (getAllocationTagsCount() > 0) {
hash = (37 * hash) + ALLOCATION_TAGS_FIELD_NUMBER;
hash = (53 * hash) + getAllocationTagsList().hashCode();
}
// Unknown fields participate so messages differing only in unknown data
// hash differently, keeping hashCode consistent with equals().
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points generated for every supported input kind
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. In-memory overloads delegate directly
// to PARSER; stream overloads route through GeneratedMessageV3 helpers, which
// unwrap protobuf exceptions into plain IOExceptions for the caller.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix first, allowing multiple
// messages to be framed on one stream.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods. All construction funnels through
// DEFAULT_INSTANCE.toBuilder(); toBuilder() itself skips the mergeFrom copy
// when invoked on the default instance, returning an empty Builder directly.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Parent-aware variant used internally when this message is built as a
// nested field of an enclosing builder.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NMContainerStatusProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NMContainerStatusProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProtoOrBuilder {
// Descriptor plumbing: ties this Builder to the reflection metadata held in
// the enclosing YarnServerCommonServiceProtos outer class.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NMContainerStatusProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NMContainerStatusProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested-message field builders when the runtime flag
// alwaysUseFieldBuilders is set; otherwise they are created lazily on
// first access.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getContainerIdFieldBuilder();
getResourceFieldBuilder();
getPriorityFieldBuilder();
}
}
// Resets every field to its proto-declared default and clears all presence
// bits. Nested-message field builders are disposed so a stale builder can't
// resurrect old state; enum fields reset to their first declared value's
// number (1), diagnostics to its declared default "N/A", and
// allocation_request_id to its declared default -1.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
containerId_ = null;
if (containerIdBuilder_ != null) {
containerIdBuilder_.dispose();
containerIdBuilder_ = null;
}
containerState_ = 1;
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
priority_ = null;
if (priorityBuilder_ != null) {
priorityBuilder_.dispose();
priorityBuilder_ = null;
}
diagnostics_ = "N/A";
containerExitStatus_ = 0;
creationTime_ = 0L;
nodeLabelExpression_ = "";
version_ = 0;
executionType_ = 1;
allocationRequestId_ = -1L;
allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_NMContainerStatusProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.getDefaultInstance();
}
// build() enforces required-field initialization (throws if incomplete);
// buildPartial() skips that check and is used during merges.
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto(this);
// Only copy fields when at least one presence bit is set.
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies each set field from the builder into the immutable result message,
// translating builder presence bits into the message's bitField0_. For
// message-typed fields the nested builder (if any) is built; otherwise the
// cached message reference is used. The repeated allocation_tags list is
// frozen in place rather than copied.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.containerId_ = containerIdBuilder_ == null
? containerId_
: containerIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.containerState_ = containerState_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.resource_ = resourceBuilder_ == null
? resource_
: resourceBuilder_.build();
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.priority_ = priorityBuilder_ == null
? priority_
: priorityBuilder_.build();
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.diagnostics_ = diagnostics_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.containerExitStatus_ = containerExitStatus_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.creationTime_ = creationTime_;
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.nodeLabelExpression_ = nodeLabelExpression_;
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00000100) != 0)) {
result.version_ = version_;
to_bitField0_ |= 0x00000100;
}
if (((from_bitField0_ & 0x00000200) != 0)) {
result.executionType_ = executionType_;
to_bitField0_ |= 0x00000200;
}
if (((from_bitField0_ & 0x00000400) != 0)) {
result.allocationRequestId_ = allocationRequestId_;
to_bitField0_ |= 0x00000400;
}
if (((from_bitField0_ & 0x00000800) != 0)) {
// Repeated field: freeze and share the list (no presence bit in result).
allocationTags_.makeImmutable();
result.allocationTags_ = allocationTags_;
}
result.bitField0_ |= to_bitField0_;
}
// Reflection-based mutation overrides. These exist so the generated Builder
// keeps the covariant Builder return type while delegating entirely to
// GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Generic merge entry point: dispatches to the typed overload for
// NMContainerStatusProto, otherwise falls back to reflective field-by-field
// merging in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge following protobuf semantics: singular scalar/enum/string
// fields set in `other` overwrite this builder's values; message fields
// (container_id, resource, priority) are recursively merged; the repeated
// allocation_tags list is concatenated. Merging the default instance is a
// no-op.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto.getDefaultInstance()) return this;
if (other.hasContainerId()) {
mergeContainerId(other.getContainerId());
}
if (other.hasContainerState()) {
setContainerState(other.getContainerState());
}
if (other.hasResource()) {
mergeResource(other.getResource());
}
if (other.hasPriority()) {
mergePriority(other.getPriority());
}
if (other.hasDiagnostics()) {
// String fields copy the raw Object (String or ByteString) directly to
// avoid forcing UTF-8 decoding.
diagnostics_ = other.diagnostics_;
bitField0_ |= 0x00000010;
onChanged();
}
if (other.hasContainerExitStatus()) {
setContainerExitStatus(other.getContainerExitStatus());
}
if (other.hasCreationTime()) {
setCreationTime(other.getCreationTime());
}
if (other.hasNodeLabelExpression()) {
nodeLabelExpression_ = other.nodeLabelExpression_;
bitField0_ |= 0x00000080;
onChanged();
}
if (other.hasVersion()) {
setVersion(other.getVersion());
}
if (other.hasExecutionType()) {
setExecutionType(other.getExecutionType());
}
if (other.hasAllocationRequestId()) {
setAllocationRequestId(other.getAllocationRequestId());
}
if (!other.allocationTags_.isEmpty()) {
if (allocationTags_.isEmpty()) {
// Adopt other's (immutable) list by reference; copy-on-write happens
// later via ensureAllocationTagsIsMutable() if this builder mutates it.
allocationTags_ = other.allocationTags_;
bitField0_ |= 0x00000800;
} else {
ensureAllocationTagsIsMutable();
allocationTags_.addAll(other.allocationTags_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// All fields of this message are optional; the only initialization
// requirement is transitive — a set `resource` sub-message must itself be
// initialized.
@java.lang.Override
public final boolean isInitialized() {
if (hasResource()) {
if (!getResource().isInitialized()) {
return false;
}
}
return true;
}
// Wire-format parser: reads tag/value pairs from the stream and merges them
// into this builder. Unknown enum numbers are preserved as unknown varint
// fields (proto2 semantics); unrecognized tags go to the unknown-field set;
// tag 0 or an end-group tag terminates the loop. onChanged() fires in the
// finally block so listeners are notified even on partial reads.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
// field 1: container_id (message)
input.readMessage(
getContainerIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
// field 2: container_state (enum) — unknown numbers kept as-is
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto tmpValue =
org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(2, tmpRaw);
} else {
containerState_ = tmpRaw;
bitField0_ |= 0x00000002;
}
break;
} // case 16
case 26: {
// field 3: resource (message)
input.readMessage(
getResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
// field 4: priority (message)
input.readMessage(
getPriorityFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000008;
break;
} // case 34
case 42: {
// field 5: diagnostics (string, stored lazily as ByteString)
diagnostics_ = input.readBytes();
bitField0_ |= 0x00000010;
break;
} // case 42
case 48: {
// field 6: container_exit_status (int32)
containerExitStatus_ = input.readInt32();
bitField0_ |= 0x00000020;
break;
} // case 48
case 56: {
// field 7: creation_time (int64)
creationTime_ = input.readInt64();
bitField0_ |= 0x00000040;
break;
} // case 56
case 66: {
// field 8: nodeLabelExpression (string)
nodeLabelExpression_ = input.readBytes();
bitField0_ |= 0x00000080;
break;
} // case 66
case 72: {
// field 9: version (int32)
version_ = input.readInt32();
bitField0_ |= 0x00000100;
break;
} // case 72
case 80: {
// field 10: executionType (enum)
int tmpRaw = input.readEnum();
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue =
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(10, tmpRaw);
} else {
executionType_ = tmpRaw;
bitField0_ |= 0x00000200;
}
break;
} // case 80
case 88: {
// field 11: allocation_request_id (int64)
allocationRequestId_ = input.readInt64();
bitField0_ |= 0x00000400;
break;
} // case 88
case 98: {
// field 12: allocation_tags (repeated string)
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureAllocationTagsIsMutable();
allocationTags_.add(bs);
break;
} // case 98
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// bitField0_ tracks field presence for every optional field of this builder;
// bit 0x00000001 below corresponds to container_id.
private int bitField0_;
// container_id is held either as a plain message reference (containerId_) or,
// once getContainerIdFieldBuilder() has been called, inside the lazily
// created SingleFieldBuilderV3 — never both at once.
private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 * @return Whether the containerId field is set.
 */
public boolean hasContainerId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 * @return The containerId.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
if (containerIdBuilder_ == null) {
return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
} else {
return containerIdBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containerIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
containerId_ = value;
} else {
containerIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public Builder setContainerId(
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
if (containerIdBuilder_ == null) {
containerId_ = builderForValue.build();
} else {
containerIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
if (containerIdBuilder_ == null) {
// Merge only when a non-default value is already present; otherwise the
// incoming message simply replaces the field.
if (((bitField0_ & 0x00000001) != 0) &&
containerId_ != null &&
containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
getContainerIdBuilder().mergeFrom(value);
} else {
containerId_ = value;
}
} else {
containerIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public Builder clearContainerId() {
bitField0_ = (bitField0_ & ~0x00000001);
containerId_ = null;
if (containerIdBuilder_ != null) {
containerIdBuilder_.dispose();
containerIdBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getContainerIdFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
if (containerIdBuilder_ != null) {
return containerIdBuilder_.getMessageOrBuilder();
} else {
return containerId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
getContainerIdFieldBuilder() {
if (containerIdBuilder_ == null) {
// Lazily create the field builder, seeding it with the current value and
// handing ownership over (containerId_ is nulled afterwards).
containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
getContainerId(),
getParentForChildren(),
isClean());
containerId_ = null;
}
return containerIdBuilder_;
}
// container_state is stored as its raw enum number; 1 is the default (and
// maps to ContainerStateProto.C_NEW in the getter's null fallback).
private int containerState_ = 1;
/**
 * optional .hadoop.yarn.ContainerStateProto container_state = 2;
 * @return Whether the containerState field is set.
 */
@java.lang.Override public boolean hasContainerState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional .hadoop.yarn.ContainerStateProto container_state = 2;
 * @return The containerState.
 */
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(containerState_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
}
/**
 * optional .hadoop.yarn.ContainerStateProto container_state = 2;
 * @param value The containerState to set.
 * @return This builder for chaining.
 */
public Builder setContainerState(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
containerState_ = value.getNumber();
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ContainerStateProto container_state = 2;
 * @return This builder for chaining.
 */
public Builder clearContainerState() {
bitField0_ = (bitField0_ & ~0x00000002);
containerState_ = 1;
onChanged();
return this;
}
// resource follows the same message-field pattern as container_id: a plain
// reference until the SingleFieldBuilderV3 is lazily created, presence
// tracked by bit 0x00000004.
private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 * @return Whether the resource field is set.
 */
public boolean hasResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 * @return The resource.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
if (resourceBuilder_ == null) {
return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
} else {
return resourceBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 */
public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
} else {
resourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 */
public Builder setResource(
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
if (resourceBuilder_ == null) {
resource_ = builderForValue.build();
} else {
resourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 */
public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
if (resourceBuilder_ == null) {
// Merge only into an existing non-default value; otherwise replace.
if (((bitField0_ & 0x00000004) != 0) &&
resource_ != null &&
resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
getResourceBuilder().mergeFrom(value);
} else {
resource_ = value;
}
} else {
resourceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 */
public Builder clearResource() {
bitField0_ = (bitField0_ & ~0x00000004);
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getResourceFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
if (resourceBuilder_ != null) {
return resourceBuilder_.getMessageOrBuilder();
} else {
return resource_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
}
}
/**
 * optional .hadoop.yarn.ResourceProto resource = 3;
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
getResourceFieldBuilder() {
if (resourceBuilder_ == null) {
resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
getResource(),
getParentForChildren(),
isClean());
resource_ = null;
}
return resourceBuilder_;
}
// priority: same lazily-built message-field pattern, presence bit 0x00000008.
private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 * @return Whether the priority field is set.
 */
public boolean hasPriority() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 * @return The priority.
 */
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
if (priorityBuilder_ == null) {
return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
} else {
return priorityBuilder_.getMessage();
}
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 */
public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
if (priorityBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
priority_ = value;
} else {
priorityBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 */
public Builder setPriority(
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
if (priorityBuilder_ == null) {
priority_ = builderForValue.build();
} else {
priorityBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 */
public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
if (priorityBuilder_ == null) {
// Merge only into an existing non-default value; otherwise replace.
if (((bitField0_ & 0x00000008) != 0) &&
priority_ != null &&
priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
getPriorityBuilder().mergeFrom(value);
} else {
priority_ = value;
}
} else {
priorityBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 */
public Builder clearPriority() {
bitField0_ = (bitField0_ & ~0x00000008);
priority_ = null;
if (priorityBuilder_ != null) {
priorityBuilder_.dispose();
priorityBuilder_ = null;
}
onChanged();
return this;
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getPriorityFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
if (priorityBuilder_ != null) {
return priorityBuilder_.getMessageOrBuilder();
} else {
return priority_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
}
}
/**
 * optional .hadoop.yarn.PriorityProto priority = 4;
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>
getPriorityFieldBuilder() {
if (priorityBuilder_ == null) {
priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
getPriority(),
getParentForChildren(),
isClean());
priority_ = null;
}
return priorityBuilder_;
}
// diagnostics uses protobuf's dual String/ByteString representation: the
// field holds whichever form was seen last, and the getters convert (and
// cache) on demand. Declared proto default is "N/A".
private java.lang.Object diagnostics_ = "N/A";
/**
 * optional string diagnostics = 5 [default = "N/A"];
 * @return Whether the diagnostics field is set.
 */
public boolean hasDiagnostics() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
 * optional string diagnostics = 5 [default = "N/A"];
 * @return The diagnostics.
 */
public java.lang.String getDiagnostics() {
java.lang.Object ref = diagnostics_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decoded form when the bytes were valid UTF-8, so the
// original bytes survive a round-trip otherwise.
if (bs.isValidUtf8()) {
diagnostics_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * optional string diagnostics = 5 [default = "N/A"];
 * @return The bytes for diagnostics.
 */
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes() {
java.lang.Object ref = diagnostics_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
diagnostics_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * optional string diagnostics = 5 [default = "N/A"];
 * @param value The diagnostics to set.
 * @return This builder for chaining.
 */
public Builder setDiagnostics(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
diagnostics_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
 * optional string diagnostics = 5 [default = "N/A"];
 * @return This builder for chaining.
 */
public Builder clearDiagnostics() {
// Restore the declared default ("N/A") from the default instance.
diagnostics_ = getDefaultInstance().getDiagnostics();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
 * optional string diagnostics = 5 [default = "N/A"];
 * @param value The bytes for diagnostics to set.
 * @return This builder for chaining.
 */
public Builder setDiagnosticsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
diagnostics_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
// container_exit_status: plain int32 with presence bit 0x00000020.
private int containerExitStatus_ ;
/**
 * optional int32 container_exit_status = 6;
 * @return Whether the containerExitStatus field is set.
 */
@java.lang.Override
public boolean hasContainerExitStatus() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
 * optional int32 container_exit_status = 6;
 * @return The containerExitStatus.
 */
@java.lang.Override
public int getContainerExitStatus() {
return containerExitStatus_;
}
/**
 * optional int32 container_exit_status = 6;
 * @param value The containerExitStatus to set.
 * @return This builder for chaining.
 */
public Builder setContainerExitStatus(int value) {
containerExitStatus_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
 * optional int32 container_exit_status = 6;
 * @return This builder for chaining.
 */
public Builder clearContainerExitStatus() {
bitField0_ = (bitField0_ & ~0x00000020);
containerExitStatus_ = 0;
onChanged();
return this;
}
private long creationTime_ ;
/**
* optional int64 creation_time = 7;
* @return Whether the creationTime field is set.
*/
@java.lang.Override
public boolean hasCreationTime() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional int64 creation_time = 7;
* @return The creationTime.
*/
@java.lang.Override
public long getCreationTime() {
return creationTime_;
}
/**
* optional int64 creation_time = 7;
* @param value The creationTime to set.
* @return This builder for chaining.
*/
public Builder setCreationTime(long value) {
creationTime_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* optional int64 creation_time = 7;
* @return This builder for chaining.
*/
public Builder clearCreationTime() {
bitField0_ = (bitField0_ & ~0x00000040);
creationTime_ = 0L;
onChanged();
return this;
}
// Builder state and accessors for `optional string nodeLabelExpression = 8`
// (has-bit 0x00000080). Same lazy String/ByteString caching scheme as the
// other string fields in this builder.
private java.lang.Object nodeLabelExpression_ = "";
/**
* optional string nodeLabelExpression = 8;
* @return Whether the nodeLabelExpression field is set.
*/
public boolean hasNodeLabelExpression() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional string nodeLabelExpression = 8;
* @return The nodeLabelExpression.
*/
public java.lang.String getNodeLabelExpression() {
java.lang.Object ref = nodeLabelExpression_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache decoded form only when valid UTF-8.
if (bs.isValidUtf8()) {
nodeLabelExpression_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string nodeLabelExpression = 8;
* @return The bytes for nodeLabelExpression.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getNodeLabelExpressionBytes() {
java.lang.Object ref = nodeLabelExpression_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nodeLabelExpression_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string nodeLabelExpression = 8;
* @param value The nodeLabelExpression to set.
* @return This builder for chaining.
*/
public Builder setNodeLabelExpression(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nodeLabelExpression_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* optional string nodeLabelExpression = 8;
* @return This builder for chaining.
*/
public Builder clearNodeLabelExpression() {
nodeLabelExpression_ = getDefaultInstance().getNodeLabelExpression();
bitField0_ = (bitField0_ & ~0x00000080);
onChanged();
return this;
}
/**
* optional string nodeLabelExpression = 8;
* @param value The bytes for nodeLabelExpression to set.
* @return This builder for chaining.
*/
public Builder setNodeLabelExpressionBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
nodeLabelExpression_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
// Builder state and accessors for `optional int32 version = 9`
// (has-bit 0x00000100).
private int version_ ;
/**
* optional int32 version = 9;
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* optional int32 version = 9;
* @return The version.
*/
@java.lang.Override
public int getVersion() {
return version_;
}
/**
* optional int32 version = 9;
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(int value) {
version_ = value;
bitField0_ |= 0x00000100;
onChanged();
return this;
}
/**
* optional int32 version = 9;
* @return This builder for chaining.
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000100);
version_ = 0;
onChanged();
return this;
}
// Enum field stored as its wire number; 1 corresponds to the proto default
// GUARANTEED (has-bit 0x00000200).
private int executionType_ = 1;
/**
* optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
* @return Whether the executionType field is set.
*/
@java.lang.Override public boolean hasExecutionType() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
* @return The executionType.
*/
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
// forNumber returns null for an unrecognized number; fall back to the default.
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
}
/**
* optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
* @param value The executionType to set.
* @return This builder for chaining.
*/
public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000200;
executionType_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.yarn.ExecutionTypeProto executionType = 10 [default = GUARANTEED];
* @return This builder for chaining.
*/
public Builder clearExecutionType() {
bitField0_ = (bitField0_ & ~0x00000200);
executionType_ = 1;
onChanged();
return this;
}
// Builder state and accessors for `optional int64 allocation_request_id = 11
// [default = -1]` (has-bit 0x00000400). Note the non-zero proto default.
private long allocationRequestId_ = -1L;
/**
* optional int64 allocation_request_id = 11 [default = -1];
* @return Whether the allocationRequestId field is set.
*/
@java.lang.Override
public boolean hasAllocationRequestId() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* optional int64 allocation_request_id = 11 [default = -1];
* @return The allocationRequestId.
*/
@java.lang.Override
public long getAllocationRequestId() {
return allocationRequestId_;
}
/**
* optional int64 allocation_request_id = 11 [default = -1];
* @param value The allocationRequestId to set.
* @return This builder for chaining.
*/
public Builder setAllocationRequestId(long value) {
allocationRequestId_ = value;
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* optional int64 allocation_request_id = 11 [default = -1];
* @return This builder for chaining.
*/
public Builder clearAllocationRequestId() {
bitField0_ = (bitField0_ & ~0x00000400);
allocationRequestId_ = -1L;
onChanged();
return this;
}
// Builder state and accessors for `repeated string allocation_tags = 12`
// (bit 0x00000800 marks "builder has tag data to propagate on build").
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
// Copy-on-write: replace the shared/immutable list with a mutable copy before
// the first mutation.
private void ensureAllocationTagsIsMutable() {
if (!allocationTags_.isModifiable()) {
allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_);
}
bitField0_ |= 0x00000800;
}
/**
* repeated string allocation_tags = 12;
* @return A list containing the allocationTags.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getAllocationTagsList() {
// Freeze before handing out so callers cannot mutate builder state.
allocationTags_.makeImmutable();
return allocationTags_;
}
/**
* repeated string allocation_tags = 12;
* @return The count of allocationTags.
*/
public int getAllocationTagsCount() {
return allocationTags_.size();
}
/**
* repeated string allocation_tags = 12;
* @param index The index of the element to return.
* @return The allocationTags at the given index.
*/
public java.lang.String getAllocationTags(int index) {
return allocationTags_.get(index);
}
/**
* repeated string allocation_tags = 12;
* @param index The index of the value to return.
* @return The bytes of the allocationTags at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getAllocationTagsBytes(int index) {
return allocationTags_.getByteString(index);
}
/**
* repeated string allocation_tags = 12;
* @param index The index to set the value at.
* @param value The allocationTags to set.
* @return This builder for chaining.
*/
public Builder setAllocationTags(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAllocationTagsIsMutable();
allocationTags_.set(index, value);
bitField0_ |= 0x00000800;
onChanged();
return this;
}
/**
* repeated string allocation_tags = 12;
* @param value The allocationTags to add.
* @return This builder for chaining.
*/
public Builder addAllocationTags(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAllocationTagsIsMutable();
allocationTags_.add(value);
bitField0_ |= 0x00000800;
onChanged();
return this;
}
/**
* repeated string allocation_tags = 12;
* Appends all of {@code values} to the allocation_tags list.
* @param values The allocationTags to add; must not contain null elements.
* @return This builder for chaining.
*/
public Builder addAllAllocationTags(
// Type parameter restored: the HTML extraction of this generated file
// stripped the generics, leaving a raw (unchecked) Iterable.
java.lang.Iterable<java.lang.String> values) {
ensureAllocationTagsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, allocationTags_);
bitField0_ |= 0x00000800;
onChanged();
return this;
}
/**
* repeated string allocation_tags = 12;
* Resets allocation_tags to the shared empty list and clears bit 0x00000800.
* @return This builder for chaining.
*/
public Builder clearAllocationTags() {
allocationTags_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
// Removed a stray trailing ";" (empty statement) after this line.
bitField0_ = (bitField0_ & ~0x00000800);
onChanged();
return this;
}
/**
* repeated string allocation_tags = 12;
* @param value The bytes of the allocationTags to add.
* @return This builder for chaining.
*/
public Builder addAllocationTagsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureAllocationTagsIsMutable();
allocationTags_.add(value);
bitField0_ |= 0x00000800;
onChanged();
return this;
}
// Standard generated pass-throughs to the GeneratedMessageV3.Builder
// unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NMContainerStatusProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NMContainerStatusProto)
// Lazily-referenced singleton default instance for NMContainerStatusProto;
// created once in the static initializer.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire parser for NMContainerStatusProto. Type parameters restored: the HTML
// extraction of this generated file stripped the generics, leaving raw
// Parser/AbstractParser types. Deprecated in favor of parser().
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NMContainerStatusProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NMContainerStatusProto>() {
@java.lang.Override
public NMContainerStatusProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
// Attach whatever was parsed so far so the caller can inspect it.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<NMContainerStatusProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<NMContainerStatusProto> getParserForType() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by MessageLite; returns the shared singleton.
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NMContainerStatusProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only view shared by SCMUploaderNotifyRequestProto and its Builder:
// two optional string fields, resource_key (1) and filename (2).
public interface SCMUploaderNotifyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SCMUploaderNotifyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional string resource_key = 1;
* @return Whether the resourceKey field is set.
*/
boolean hasResourceKey();
/**
* optional string resource_key = 1;
* @return The resourceKey.
*/
java.lang.String getResourceKey();
/**
* optional string resource_key = 1;
* @return The bytes for resourceKey.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getResourceKeyBytes();
/**
* optional string filename = 2;
* @return Whether the filename field is set.
*/
boolean hasFilename();
/**
* optional string filename = 2;
* @return The filename.
*/
java.lang.String getFilename();
/**
* optional string filename = 2;
* @return The bytes for filename.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getFilenameBytes();
}
/**
* Protobuf type {@code hadoop.yarn.SCMUploaderNotifyRequestProto}
*/
public static final class SCMUploaderNotifyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SCMUploaderNotifyRequestProto)
SCMUploaderNotifyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SCMUploaderNotifyRequestProto.newBuilder() to construct.
// Wildcard type parameter restored: the HTML extraction of this generated
// file reduced "Builder<?>" to the invalid token "Builder>".
private SCMUploaderNotifyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance; both string fields start empty.
private SCMUploaderNotifyRequestProto() {
resourceKey_ = "";
filename_ = "";
}
// Reflection hook used by the runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SCMUploaderNotifyRequestProto();
}
// Descriptor plumbing wiring this class to the generated file-level tables.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto.Builder.class);
}
// Presence bits: 0x1 = resource_key, 0x2 = filename.
private int bitField0_;
public static final int RESOURCE_KEY_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resourceKey_ = "";
/**
* optional string resource_key = 1;
* @return Whether the resourceKey field is set.
*/
@java.lang.Override
public boolean hasResourceKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string resource_key = 1;
* @return The resourceKey.
*/
@java.lang.Override
public java.lang.String getResourceKey() {
// The volatile field may hold a String or the raw ByteString from the wire;
// decode lazily and cache back only when valid UTF-8 (benign race).
java.lang.Object ref = resourceKey_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
resourceKey_ = s;
}
return s;
}
}
/**
* optional string resource_key = 1;
* @return The bytes for resourceKey.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getResourceKeyBytes() {
java.lang.Object ref = resourceKey_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceKey_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int FILENAME_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object filename_ = "";
/**
* optional string filename = 2;
* @return Whether the filename field is set.
*/
@java.lang.Override
public boolean hasFilename() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string filename = 2;
* @return The filename.
*/
@java.lang.Override
public java.lang.String getFilename() {
java.lang.Object ref = filename_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
filename_ = s;
}
return s;
}
}
/**
* optional string filename = 2;
* @return The bytes for filename.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getFilenameBytes() {
java.lang.Object ref = filename_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
filename_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
// -1 = not computed, 0 = not initialized, 1 = initialized. Both fields are
// optional, so this message is always initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes only the fields whose presence bits are set, then any unknown fields.
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, resourceKey_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, filename_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Memoized serialized-size computation mirroring writeTo.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, resourceKey_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, filename_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Field-wise equality: presence must match, then values, then unknown fields.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto) obj;
if (hasResourceKey() != other.hasResourceKey()) return false;
if (hasResourceKey()) {
if (!getResourceKey()
.equals(other.getResourceKey())) return false;
}
if (hasFilename() != other.hasFilename()) return false;
if (hasFilename()) {
if (!getFilename()
.equals(other.getFilename())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Memoized hash over descriptor, set fields (number + value) and unknown fields.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasResourceKey()) {
hash = (37 * hash) + RESOURCE_KEY_FIELD_NUMBER;
hash = (53 * hash) + getResourceKey().hashCode();
}
if (hasFilename()) {
hash = (37 * hash) + FILENAME_FIELD_NUMBER;
hash = (53 * hash) + getFilename().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to
// PARSER or the GeneratedMessageV3 IOException-wrapping helpers.
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Builders are created from the default instance; toBuilder() on the default
// instance short-circuits to an empty Builder to avoid a redundant merge.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SCMUploaderNotifyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SCMUploaderNotifyRequestProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProtoOrBuilder {
// Builder descriptor plumbing — same file-level tables as the message class.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
// Resets both fields and their presence bits to empty defaults.
public Builder clear() {
super.clear();
bitField0_ = 0;
resourceKey_ = "";
filename_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto.getDefaultInstance();
}
@java.lang.Override
// build() = buildPartial() + required-field check (trivially passes here:
// both fields are optional).
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies only the fields whose builder presence bits are set into result,
// translating builder bits into the message's bitField0_.
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.resourceKey_ = resourceKey_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.filename_ = filename_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
// Covariant-return pass-throughs to GeneratedMessageV3.Builder so chained
// calls keep the concrete Builder type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
// Dispatches to the typed merge when possible, else the reflective one.
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields that are set on `other`; set-wins semantics.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto.getDefaultInstance()) return this;
if (other.hasResourceKey()) {
resourceKey_ = other.resourceKey_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasFilename()) {
filename_ = other.filename_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Wire-format merge: tag-dispatch loop over field numbers 1 (resource_key)
// and 2 (filename); everything else goes to unknown-field handling.
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
// Stored as raw bytes; decoded lazily by the String getters.
resourceKey_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
filename_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder-side presence bits: 0x1 = resource_key, 0x2 = filename.
private int bitField0_;
private java.lang.Object resourceKey_ = "";
/**
* optional string resource_key = 1;
* @return Whether the resourceKey field is set.
*/
public boolean hasResourceKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string resource_key = 1;
* @return The resourceKey.
*/
public java.lang.String getResourceKey() {
java.lang.Object ref = resourceKey_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache decoded form only when valid UTF-8.
if (bs.isValidUtf8()) {
resourceKey_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string resource_key = 1;
* @return The bytes for resourceKey.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getResourceKeyBytes() {
java.lang.Object ref = resourceKey_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceKey_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string resource_key = 1;
* @param value The resourceKey to set.
* @return This builder for chaining.
*/
public Builder setResourceKey(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
resourceKey_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional string resource_key = 1;
* @return This builder for chaining.
*/
public Builder clearResourceKey() {
resourceKey_ = getDefaultInstance().getResourceKey();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* optional string resource_key = 1;
* @param value The bytes for resourceKey to set.
* @return This builder for chaining.
*/
public Builder setResourceKeyBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
resourceKey_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Held as Object: either a java.lang.String or a ByteString, following the
// lazy UTF-8 decode/cache scheme protoc generates for string fields.
private java.lang.Object filename_ = "";
/**
 * <code>optional string filename = 2;</code>
 * @return Whether the filename field is set.
 */
public boolean hasFilename() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <code>optional string filename = 2;</code>
 * @return The filename.
 */
public java.lang.String getFilename() {
  java.lang.Object ref = filename_;
  if (!(ref instanceof java.lang.String)) {
    org.apache.hadoop.thirdparty.protobuf.ByteString bs =
        (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String only when the bytes are valid UTF-8, so an
    // invalid payload keeps round-tripping its original bytes.
    if (bs.isValidUtf8()) {
      filename_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <code>optional string filename = 2;</code>
 * @return The bytes for filename.
 */
public org.apache.hadoop.thirdparty.protobuf.ByteString
    getFilenameBytes() {
  java.lang.Object ref = filename_;
  if (ref instanceof String) {
    // Encode once and cache the ByteString form in place of the String.
    org.apache.hadoop.thirdparty.protobuf.ByteString b =
        org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    filename_ = b;
    return b;
  } else {
    return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
  }
}
/**
 * <code>optional string filename = 2;</code>
 * @param value The filename to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setFilename(
    java.lang.String value) {
  if (value == null) { throw new NullPointerException(); }
  filename_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * <code>optional string filename = 2;</code>
 * @return This builder for chaining.
 */
public Builder clearFilename() {
  // Reset to the default instance's value and clear the presence bit.
  filename_ = getDefaultInstance().getFilename();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 * <code>optional string filename = 2;</code>
 * @param value The bytes for filename to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setFilenameBytes(
    org.apache.hadoop.thirdparty.protobuf.ByteString value) {
  if (value == null) { throw new NullPointerException(); }
  filename_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Final pass-through overrides: unknown-field handling is delegated entirely
// to the GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SCMUploaderNotifyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SCMUploaderNotifyRequestProto)
// Singleton default (all-fields-unset) instance shared by every caller.
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto();
}
/** @return The shared immutable default instance of this message. */
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Deprecated in generated code: callers should use {@link #parser()}.
 * NOTE(review): the generic type arguments on {@code Parser}/{@code AbstractParser}
 * were stripped in transit (raw types); restored here to match protoc output.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SCMUploaderNotifyRequestProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SCMUploaderNotifyRequestProto>() {
  @java.lang.Override
  public SCMUploaderNotifyRequestProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach whatever was parsed so far so callers can inspect it.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
/** @return The preferred parser for this message type. */
public static org.apache.hadoop.thirdparty.protobuf.Parser<SCMUploaderNotifyRequestProto> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SCMUploaderNotifyRequestProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyRequestProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Read-only accessor contract implemented by both the immutable message and
// its Builder for hadoop.yarn.SCMUploaderNotifyResponseProto.
public interface SCMUploaderNotifyResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.SCMUploaderNotifyResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  /**
   * <code>optional bool accepted = 1;</code>
   * @return Whether the accepted field is set.
   */
  boolean hasAccepted();
  /**
   * <code>optional bool accepted = 1;</code>
   * @return The accepted.
   */
  boolean getAccepted();
}
/**
* Protobuf type {@code hadoop.yarn.SCMUploaderNotifyResponseProto}
*/
public static final class SCMUploaderNotifyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.SCMUploaderNotifyResponseProto)
SCMUploaderNotifyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SCMUploaderNotifyResponseProto.newBuilder() to construct.
private SCMUploaderNotifyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private SCMUploaderNotifyResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SCMUploaderNotifyResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto.Builder.class);
}
private int bitField0_;
public static final int ACCEPTED_FIELD_NUMBER = 1;
private boolean accepted_ = false;
/**
* optional bool accepted = 1;
* @return Whether the accepted field is set.
*/
@java.lang.Override
public boolean hasAccepted() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bool accepted = 1;
* @return The accepted.
*/
@java.lang.Override
public boolean getAccepted() {
return accepted_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeBool(1, accepted_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBoolSize(1, accepted_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto other = (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto) obj;
if (hasAccepted() != other.hasAccepted()) return false;
if (hasAccepted()) {
if (getAccepted()
!= other.getAccepted()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAccepted()) {
hash = (37 * hash) + ACCEPTED_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getAccepted());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.SCMUploaderNotifyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SCMUploaderNotifyResponseProto)
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto.class, org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
accepted_ = false;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.internal_static_hadoop_yarn_SCMUploaderNotifyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto build() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto buildPartial() {
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto result = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.accepted_ = accepted_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto) {
return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto other) {
if (other == org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto.getDefaultInstance()) return this;
if (other.hasAccepted()) {
setAccepted(other.getAccepted());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
accepted_ = input.readBool();
bitField0_ |= 0x00000001;
break;
} // case 8
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private boolean accepted_ ;
/**
* optional bool accepted = 1;
* @return Whether the accepted field is set.
*/
@java.lang.Override
public boolean hasAccepted() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bool accepted = 1;
* @return The accepted.
*/
@java.lang.Override
public boolean getAccepted() {
return accepted_;
}
/**
* optional bool accepted = 1;
* @param value The accepted to set.
* @return This builder for chaining.
*/
public Builder setAccepted(boolean value) {
accepted_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional bool accepted = 1;
* @return This builder for chaining.
*/
public Builder clearAccepted() {
bitField0_ = (bitField0_ & ~0x00000001);
accepted_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SCMUploaderNotifyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SCMUploaderNotifyResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto();
}
public static org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public SCMUploaderNotifyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SCMUploaderNotifyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface SCMUploaderCanUploadRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.SCMUploaderCanUploadRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
*