// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mr_service_protos.proto
package org.apache.hadoop.mapreduce.v2.proto;
public final class MRServiceProtos {
private MRServiceProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
public interface GetJobReportRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetJobReportRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
boolean hasJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
}
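// Illustrative usage sketch (not part of the generated file): constructing a
// GetJobReportRequestProto around a JobIdProto. The JobIdProto field names and
// values below are assumptions based on MRProtos and serve only as placeholders.
//
//   MRProtos.JobIdProto jobId = MRProtos.JobIdProto.newBuilder()
//       .setId(1) // hypothetical job sequence number
//       .build();
//   GetJobReportRequestProto request = GetJobReportRequestProto.newBuilder()
//       .setJobId(jobId)
//       .build();
//   // hasJobId() now reports true, and getJobId() returns the message set above.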
/**
* Protobuf type {@code hadoop.mapreduce.GetJobReportRequestProto}
*/
public static final class GetJobReportRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetJobReportRequestProto)
GetJobReportRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetJobReportRequestProto.newBuilder() to construct.
private GetJobReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetJobReportRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetJobReportRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.Builder.class);
}
private int bitField0_;
public static final int JOB_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
@java.lang.Override
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getJobId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getJobId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto) obj;
if (hasJobId() != other.hasJobId()) return false;
if (hasJobId()) {
if (!getJobId()
.equals(other.getJobId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasJobId()) {
hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
hash = (53 * hash) + getJobId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
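// A minimal round-trip sketch using the parse overloads above (illustrative
// only; `request` is assumed to be a populated GetJobReportRequestProto):
//
//   byte[] bytes = request.toByteArray();
//   GetJobReportRequestProto parsed = GetJobReportRequestProto.parseFrom(bytes);
//   // parsed.equals(request) holds: equals() compares the job_id field and
//   // any unknown fields, as implemented earlier in this class.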
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetJobReportRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetJobReportRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getJobIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.jobId_ = jobIdBuilder_ == null
? jobId_
: jobIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.getDefaultInstance()) return this;
if (other.hasJobId()) {
mergeJobId(other.getJobId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getJobIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
if (jobIdBuilder_ == null) {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
} else {
return jobIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobId_ = value;
} else {
jobIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
if (jobIdBuilder_ == null) {
jobId_ = builderForValue.build();
} else {
jobIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
jobId_ != null &&
jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
getJobIdBuilder().mergeFrom(value);
} else {
jobId_ = value;
}
} else {
jobIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder clearJobId() {
bitField0_ = (bitField0_ & ~0x00000001);
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getJobIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
if (jobIdBuilder_ != null) {
return jobIdBuilder_.getMessageOrBuilder();
} else {
return jobId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
getJobIdFieldBuilder() {
if (jobIdBuilder_ == null) {
jobIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
getJobId(),
getParentForChildren(),
isClean());
jobId_ = null;
}
return jobIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetJobReportRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetJobReportRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetJobReportRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetJobReportRequestProto>() {
@java.lang.Override
public GetJobReportRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetJobReportRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetJobReportRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface GetJobReportResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetJobReportResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
* @return Whether the jobReport field is set.
*/
boolean hasJobReport();
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
* @return The jobReport.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto getJobReport();
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder getJobReportOrBuilder();
}
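// Illustrative read-side sketch (not part of the generated file): because
// job_report is an optional field, callers should guard reads with
// hasJobReport(); getJobReport() on an unset field returns the default instance.
//
//   GetJobReportResponseProto response =
//       GetJobReportResponseProto.parseFrom(bytes); // bytes from the RPC layer (assumed)
//   if (response.hasJobReport()) {
//     MRProtos.JobReportProto report = response.getJobReport();
//     // inspect report fields here
//   }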
/**
* Protobuf type {@code hadoop.mapreduce.GetJobReportResponseProto}
*/
public static final class GetJobReportResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetJobReportResponseProto)
GetJobReportResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetJobReportResponseProto.newBuilder() to construct.
private GetJobReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetJobReportResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetJobReportResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.Builder.class);
}
private int bitField0_;
public static final int JOB_REPORT_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto jobReport_;
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
* @return Whether the jobReport field is set.
*/
@java.lang.Override
public boolean hasJobReport() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
* @return The jobReport.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto getJobReport() {
return jobReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance() : jobReport_;
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder getJobReportOrBuilder() {
return jobReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance() : jobReport_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getJobReport());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getJobReport());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto) obj;
if (hasJobReport() != other.hasJobReport()) return false;
if (hasJobReport()) {
if (!getJobReport()
.equals(other.getJobReport())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasJobReport()) {
hash = (37 * hash) + JOB_REPORT_FIELD_NUMBER;
hash = (53 * hash) + getJobReport().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetJobReportResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetJobReportResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getJobReportFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
jobReport_ = null;
if (jobReportBuilder_ != null) {
jobReportBuilder_.dispose();
jobReportBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.jobReport_ = jobReportBuilder_ == null
? jobReport_
: jobReportBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance()) return this;
if (other.hasJobReport()) {
mergeJobReport(other.getJobReport());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getJobReportFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto jobReport_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder> jobReportBuilder_;
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
* @return Whether the jobReport field is set.
*/
public boolean hasJobReport() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
* @return The jobReport.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto getJobReport() {
if (jobReportBuilder_ == null) {
return jobReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance() : jobReport_;
} else {
return jobReportBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
public Builder setJobReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto value) {
if (jobReportBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobReport_ = value;
} else {
jobReportBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
public Builder setJobReport(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder builderForValue) {
if (jobReportBuilder_ == null) {
jobReport_ = builderForValue.build();
} else {
jobReportBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
public Builder mergeJobReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto value) {
if (jobReportBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
jobReport_ != null &&
jobReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance()) {
getJobReportBuilder().mergeFrom(value);
} else {
jobReport_ = value;
}
} else {
jobReportBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
public Builder clearJobReport() {
bitField0_ = (bitField0_ & ~0x00000001);
jobReport_ = null;
if (jobReportBuilder_ != null) {
jobReportBuilder_.dispose();
jobReportBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder getJobReportBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getJobReportFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder getJobReportOrBuilder() {
if (jobReportBuilder_ != null) {
return jobReportBuilder_.getMessageOrBuilder();
} else {
return jobReport_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance() : jobReport_;
}
}
/**
* optional .hadoop.mapreduce.JobReportProto job_report = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder>
getJobReportFieldBuilder() {
if (jobReportBuilder_ == null) {
jobReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder>(
getJobReport(),
getParentForChildren(),
isClean());
jobReport_ = null;
}
return jobReportBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetJobReportResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetJobReportResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetJobReportResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetJobReportResponseProto>() {
@java.lang.Override
public GetJobReportResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetJobReportResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetJobReportResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface GetTaskReportRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskReportRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return Whether the taskId field is set.
*/
boolean hasTaskId();
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return The taskId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId();
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder();
}
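// Usage mirrors the job-report request above: wrap a TaskIdProto (defined in
// MRProtos) in the request builder. A minimal sketch with hypothetical values;
// the TaskIdProto setters are assumptions based on MRProtos:
//
//   GetTaskReportRequestProto taskRequest = GetTaskReportRequestProto.newBuilder()
//       .setTaskId(MRProtos.TaskIdProto.newBuilder()
//           .setJobId(jobId) // JobIdProto from the earlier sketch
//           .setId(0))
//       .build();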
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportRequestProto}
*/
public static final class GetTaskReportRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskReportRequestProto)
GetTaskReportRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskReportRequestProto.newBuilder() to construct.
private GetTaskReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskReportRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskReportRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.Builder.class);
}
private int bitField0_;
public static final int TASK_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return Whether the taskId field is set.
*/
@java.lang.Override
public boolean hasTaskId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return The taskId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
return taskId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
return taskId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto) obj;
if (hasTaskId() != other.hasTaskId()) return false;
if (hasTaskId()) {
if (!getTaskId()
.equals(other.getTaskId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskId()) {
hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
hash = (53 * hash) + getTaskId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskReportRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskId_ = null;
if (taskIdBuilder_ != null) {
taskIdBuilder_.dispose();
taskIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskId_ = taskIdBuilder_ == null
? taskId_
: taskIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.getDefaultInstance()) return this;
if (other.hasTaskId()) {
mergeTaskId(other.getTaskId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
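// A tag packs (field_number << 3) | wire_type; field 1 with wire type 2
// (length-delimited message) therefore arrives as tag value 10 below.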
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTaskIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> taskIdBuilder_;
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return Whether the taskId field is set.
*/
public boolean hasTaskId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return The taskId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
if (taskIdBuilder_ == null) {
return taskId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
} else {
return taskIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder setTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
if (taskIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskId_ = value;
} else {
taskIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder setTaskId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder builderForValue) {
if (taskIdBuilder_ == null) {
taskId_ = builderForValue.build();
} else {
taskIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder mergeTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
if (taskIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskId_ != null &&
taskId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
getTaskIdBuilder().mergeFrom(value);
} else {
taskId_ = value;
}
} else {
taskIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder clearTaskId() {
bitField0_ = (bitField0_ & ~0x00000001);
taskId_ = null;
if (taskIdBuilder_ != null) {
taskIdBuilder_.dispose();
taskIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder getTaskIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
if (taskIdBuilder_ != null) {
return taskIdBuilder_.getMessageOrBuilder();
} else {
return taskId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
}
}
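// Note: once the lazy field builder below is created, the current taskId_ is
// handed to it and nulled out; the builder then becomes the single source of
// truth for the field (see getTaskIdFieldBuilder()).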
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>
getTaskIdFieldBuilder() {
if (taskIdBuilder_ == null) {
taskIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>(
getTaskId(),
getParentForChildren(),
isClean());
taskId_ = null;
}
return taskIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskReportRequestProto>() {
@java.lang.Override
public GetTaskReportRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
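// Minimal usage sketch (illustration only, not generated code). Assumes a
// populated MRProtos.TaskIdProto named taskId is available from the caller:
//
//   GetTaskReportRequestProto request = GetTaskReportRequestProto.newBuilder()
//       .setTaskId(taskId)
//       .build();
//   byte[] wire = request.toByteArray();
//   GetTaskReportRequestProto parsed = GetTaskReportRequestProto.parseFrom(wire);
//   assert parsed.hasTaskId();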
public interface GetTaskReportResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskReportResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
* @return Whether the taskReport field is set.
*/
boolean hasTaskReport();
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
* @return The taskReport.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReport();
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportResponseProto}
*/
public static final class GetTaskReportResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskReportResponseProto)
GetTaskReportResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskReportResponseProto.newBuilder() to construct.
private GetTaskReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskReportResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskReportResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.Builder.class);
}
private int bitField0_;
public static final int TASK_REPORT_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto taskReport_;
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
* @return Whether the taskReport field is set.
*/
@java.lang.Override
public boolean hasTaskReport() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
* @return The taskReport.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReport() {
return taskReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance() : taskReport_;
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportOrBuilder() {
return taskReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance() : taskReport_;
}
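// memoizedIsInitialized caches a tri-state result: -1 = not yet computed,
// 0 = known false, 1 = known true. With only optional fields, the message is
// always initialized.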
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskReport());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskReport());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto) obj;
if (hasTaskReport() != other.hasTaskReport()) return false;
if (hasTaskReport()) {
if (!getTaskReport()
.equals(other.getTaskReport())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskReport()) {
hash = (37 * hash) + TASK_REPORT_FIELD_NUMBER;
hash = (53 * hash) + getTaskReport().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskReportResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskReportFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskReport_ = null;
if (taskReportBuilder_ != null) {
taskReportBuilder_.dispose();
taskReportBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskReport_ = taskReportBuilder_ == null
? taskReport_
: taskReportBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance()) return this;
if (other.hasTaskReport()) {
mergeTaskReport(other.getTaskReport());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTaskReportFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto taskReport_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder> taskReportBuilder_;
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
* @return Whether the taskReport field is set.
*/
public boolean hasTaskReport() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
* @return The taskReport.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReport() {
if (taskReportBuilder_ == null) {
return taskReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance() : taskReport_;
} else {
return taskReportBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
public Builder setTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
if (taskReportBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskReport_ = value;
} else {
taskReportBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
public Builder setTaskReport(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
if (taskReportBuilder_ == null) {
taskReport_ = builderForValue.build();
} else {
taskReportBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
public Builder mergeTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
if (taskReportBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskReport_ != null &&
taskReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance()) {
getTaskReportBuilder().mergeFrom(value);
} else {
taskReport_ = value;
}
} else {
taskReportBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
public Builder clearTaskReport() {
bitField0_ = (bitField0_ & ~0x00000001);
taskReport_ = null;
if (taskReportBuilder_ != null) {
taskReportBuilder_.dispose();
taskReportBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder getTaskReportBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskReportFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportOrBuilder() {
if (taskReportBuilder_ != null) {
return taskReportBuilder_.getMessageOrBuilder();
} else {
return taskReport_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance() : taskReport_;
}
}
/**
* optional .hadoop.mapreduce.TaskReportProto task_report = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
getTaskReportFieldBuilder() {
if (taskReportBuilder_ == null) {
taskReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>(
getTaskReport(),
getParentForChildren(),
isClean());
taskReport_ = null;
}
return taskReportBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskReportResponseProto>() {
@java.lang.Override
public GetTaskReportResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
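// Minimal read-side sketch (illustration only, not generated code). Assumes
// some java.io.InputStream named in carries a serialized response:
//
//   GetTaskReportResponseProto response = GetTaskReportResponseProto.parseFrom(in);
//   if (response.hasTaskReport()) {
//     MRProtos.TaskReportProto report = response.getTaskReport();
//     // inspect the report fields here
//   }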
public interface GetTaskAttemptReportRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskAttemptReportRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
boolean hasTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportRequestProto}
*/
public static final class GetTaskAttemptReportRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskAttemptReportRequestProto)
GetTaskAttemptReportRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskAttemptReportRequestProto.newBuilder() to construct.
private GetTaskAttemptReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskAttemptReportRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskAttemptReportRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.Builder.class);
}
private int bitField0_;
public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
@java.lang.Override
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskAttemptId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskAttemptId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto) obj;
if (hasTaskAttemptId() != other.hasTaskAttemptId()) return false;
if (hasTaskAttemptId()) {
if (!getTaskAttemptId()
.equals(other.getTaskAttemptId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskAttemptId()) {
hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
hash = (53 * hash) + getTaskAttemptId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskAttemptReportRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskAttemptIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskAttemptId_ = taskAttemptIdBuilder_ == null
? taskAttemptId_
: taskAttemptIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.getDefaultInstance()) return this;
if (other.hasTaskAttemptId()) {
mergeTaskAttemptId(other.getTaskAttemptId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTaskAttemptIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
if (taskAttemptIdBuilder_ == null) {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
} else {
return taskAttemptIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskAttemptId_ = value;
} else {
taskAttemptIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
if (taskAttemptIdBuilder_ == null) {
taskAttemptId_ = builderForValue.build();
} else {
taskAttemptIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskAttemptId_ != null &&
taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
getTaskAttemptIdBuilder().mergeFrom(value);
} else {
taskAttemptId_ = value;
}
} else {
taskAttemptIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder clearTaskAttemptId() {
bitField0_ = (bitField0_ & ~0x00000001);
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskAttemptIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
if (taskAttemptIdBuilder_ != null) {
return taskAttemptIdBuilder_.getMessageOrBuilder();
} else {
return taskAttemptId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>
getTaskAttemptIdFieldBuilder() {
if (taskAttemptIdBuilder_ == null) {
taskAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
getTaskAttemptId(),
getParentForChildren(),
isClean());
taskAttemptId_ = null;
}
return taskAttemptIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptReportRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptReportRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptReportRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskAttemptReportRequestProto>() {
@java.lang.Override
public GetTaskAttemptReportRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptReportRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptReportRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
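// Sketch of proto2 merge semantics for this message (illustration only).
// idA and idB are assumed MRProtos.TaskAttemptIdProto instances built elsewhere:
//
//   GetTaskAttemptReportRequestProto base = GetTaskAttemptReportRequestProto
//       .newBuilder().setTaskAttemptId(idA).build();
//   GetTaskAttemptReportRequestProto merged = base.toBuilder()
//       .mergeFrom(GetTaskAttemptReportRequestProto.newBuilder()
//           .setTaskAttemptId(idB).build())
//       .build();
//   // mergeTaskAttemptId() field-merges idB into idA: subfields set in idB
//   // overwrite the corresponding subfields of idA rather than replacing the
//   // whole message.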
public interface GetTaskAttemptReportResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskAttemptReportResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
* @return Whether the taskAttemptReport field is set.
*/
boolean hasTaskAttemptReport();
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
* @return The taskAttemptReport.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto getTaskAttemptReport();
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder getTaskAttemptReportOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportResponseProto}
*/
public static final class GetTaskAttemptReportResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskAttemptReportResponseProto)
GetTaskAttemptReportResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskAttemptReportResponseProto.newBuilder() to construct.
private GetTaskAttemptReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskAttemptReportResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskAttemptReportResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.Builder.class);
}
private int bitField0_;
public static final int TASK_ATTEMPT_REPORT_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto taskAttemptReport_;
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
* @return Whether the taskAttemptReport field is set.
*/
@java.lang.Override
public boolean hasTaskAttemptReport() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
* @return The taskAttemptReport.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto getTaskAttemptReport() {
return taskAttemptReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance() : taskAttemptReport_;
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder getTaskAttemptReportOrBuilder() {
return taskAttemptReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance() : taskAttemptReport_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskAttemptReport());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskAttemptReport());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
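// Editorial note (not generated code): for this optional submessage the wire
// size is tag + length prefix + payload. Field 1 with wire type 2 encodes as
// the single tag byte 0x0A, so computeMessageSize(1, msg) contributes
// 1 (tag) + varint(len(msg)) + len(msg) bytes, and the total is memoized.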
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto) obj;
if (hasTaskAttemptReport() != other.hasTaskAttemptReport()) return false;
if (hasTaskAttemptReport()) {
if (!getTaskAttemptReport()
.equals(other.getTaskAttemptReport())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskAttemptReport()) {
hash = (37 * hash) + TASK_ATTEMPT_REPORT_FIELD_NUMBER;
hash = (53 * hash) + getTaskAttemptReport().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
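// Usage sketch (editorial, not generated code): any overload above decodes a
// previously serialized message. The byte[]/ByteString/ByteBuffer variants
// throw InvalidProtocolBufferException on malformed input; the stream
// variants can additionally surface IOException. Assuming "bytes" holds a
// serialized message:
//
//   GetTaskAttemptReportResponseProto msg =
//       GetTaskAttemptReportResponseProto.parseFrom(bytes);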
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskAttemptReportResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskAttemptReportFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskAttemptReport_ = null;
if (taskAttemptReportBuilder_ != null) {
taskAttemptReportBuilder_.dispose();
taskAttemptReportBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskAttemptReport_ = taskAttemptReportBuilder_ == null
? taskAttemptReport_
: taskAttemptReportBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance()) return this;
if (other.hasTaskAttemptReport()) {
mergeTaskAttemptReport(other.getTaskAttemptReport());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTaskAttemptReportFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
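// Editorial note (not generated code): the case label 10 above is the wire
// tag for this message's only field: (field_number << 3) | wire_type
// = (1 << 3) | 2 = 10, where wire type 2 marks a length-delimited
// (embedded message) value. A tag of 0 signals end of input.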
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto taskAttemptReport_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder> taskAttemptReportBuilder_;
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
* @return Whether the taskAttemptReport field is set.
*/
public boolean hasTaskAttemptReport() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
* @return The taskAttemptReport.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto getTaskAttemptReport() {
if (taskAttemptReportBuilder_ == null) {
return taskAttemptReport_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance() : taskAttemptReport_;
} else {
return taskAttemptReportBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
public Builder setTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
if (taskAttemptReportBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskAttemptReport_ = value;
} else {
taskAttemptReportBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
public Builder setTaskAttemptReport(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder builderForValue) {
if (taskAttemptReportBuilder_ == null) {
taskAttemptReport_ = builderForValue.build();
} else {
taskAttemptReportBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
public Builder mergeTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
if (taskAttemptReportBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskAttemptReport_ != null &&
taskAttemptReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance()) {
getTaskAttemptReportBuilder().mergeFrom(value);
} else {
taskAttemptReport_ = value;
}
} else {
taskAttemptReportBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
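// Editorial note (not generated code): merge semantics for a singular
// message field are recursive. If a report is already set, the incoming
// value's set fields overwrite the corresponding fields of the existing
// submessage (repeated subfields concatenate) instead of replacing the
// whole message; use setTaskAttemptReport(...) for a plain replace.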
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
public Builder clearTaskAttemptReport() {
bitField0_ = (bitField0_ & ~0x00000001);
taskAttemptReport_ = null;
if (taskAttemptReportBuilder_ != null) {
taskAttemptReportBuilder_.dispose();
taskAttemptReportBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder getTaskAttemptReportBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskAttemptReportFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder getTaskAttemptReportOrBuilder() {
if (taskAttemptReportBuilder_ != null) {
return taskAttemptReportBuilder_.getMessageOrBuilder();
} else {
return taskAttemptReport_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance() : taskAttemptReport_;
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder>
getTaskAttemptReportFieldBuilder() {
if (taskAttemptReportBuilder_ == null) {
taskAttemptReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder>(
getTaskAttemptReport(),
getParentForChildren(),
isClean());
taskAttemptReport_ = null;
}
return taskAttemptReportBuilder_;
}
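// Editorial note (not generated code): the SingleFieldBuilderV3 is created
// lazily. Until the nested builder is first requested, the value lives in
// the plain taskAttemptReport_ reference; once the builder exists it owns
// the value, and taskAttemptReport_ is nulled so there is a single source
// of truth.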
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptReportResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptReportResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptReportResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskAttemptReportResponseProto>() {
@java.lang.Override
public GetTaskAttemptReportResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptReportResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptReportResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
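// Usage sketch (editorial, not part of the generated file): a typical
// build/serialize/parse round trip for the message above. "report" is an
// assumed, previously constructed MRProtos.TaskAttemptReportProto.
//
//   GetTaskAttemptReportResponseProto resp =
//       GetTaskAttemptReportResponseProto.newBuilder()
//           .setTaskAttemptReport(report)
//           .build();
//   byte[] wire = resp.toByteArray();
//   GetTaskAttemptReportResponseProto parsed =
//       GetTaskAttemptReportResponseProto.parseFrom(wire);
//   assert parsed.hasTaskAttemptReport();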
public interface GetCountersRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetCountersRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
boolean hasJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetCountersRequestProto}
*/
public static final class GetCountersRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetCountersRequestProto)
GetCountersRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetCountersRequestProto.newBuilder() to construct.
private GetCountersRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetCountersRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetCountersRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.Builder.class);
}
private int bitField0_;
public static final int JOB_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
@java.lang.Override
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getJobId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getJobId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto) obj;
if (hasJobId() != other.hasJobId()) return false;
if (hasJobId()) {
if (!getJobId()
.equals(other.getJobId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasJobId()) {
hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
hash = (53 * hash) + getJobId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetCountersRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetCountersRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getJobIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.jobId_ = jobIdBuilder_ == null
? jobId_
: jobIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.getDefaultInstance()) return this;
if (other.hasJobId()) {
mergeJobId(other.getJobId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getJobIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
if (jobIdBuilder_ == null) {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
} else {
return jobIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobId_ = value;
} else {
jobIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
if (jobIdBuilder_ == null) {
jobId_ = builderForValue.build();
} else {
jobIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
jobId_ != null &&
jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
getJobIdBuilder().mergeFrom(value);
} else {
jobId_ = value;
}
} else {
jobIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder clearJobId() {
bitField0_ = (bitField0_ & ~0x00000001);
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getJobIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
if (jobIdBuilder_ != null) {
return jobIdBuilder_.getMessageOrBuilder();
} else {
return jobId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
getJobIdFieldBuilder() {
if (jobIdBuilder_ == null) {
jobIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
getJobId(),
getParentForChildren(),
isClean());
jobId_ = null;
}
return jobIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetCountersRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetCountersRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetCountersRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetCountersRequestProto>() {
@java.lang.Override
public GetCountersRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetCountersRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetCountersRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
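// Usage sketch (editorial, not part of the generated file): the request is
// keyed by a JobIdProto. writeDelimitedTo/parseDelimitedFrom add a length
// prefix so several messages can share one stream. "jobId", "out", and "in"
// are assumed values/streams.
//
//   GetCountersRequestProto req =
//       GetCountersRequestProto.newBuilder().setJobId(jobId).build();
//   req.writeDelimitedTo(out);
//   GetCountersRequestProto back =
//       GetCountersRequestProto.parseDelimitedFrom(in);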
public interface GetCountersResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetCountersResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
* @return Whether the counters field is set.
*/
boolean hasCounters();
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
* @return The counters.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters();
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetCountersResponseProto}
*/
public static final class GetCountersResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetCountersResponseProto)
GetCountersResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetCountersResponseProto.newBuilder() to construct.
private GetCountersResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetCountersResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetCountersResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.Builder.class);
}
private int bitField0_;
public static final int COUNTERS_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_;
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
* @return Whether the counters field is set.
*/
@java.lang.Override
public boolean hasCounters() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
* @return The counters.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
return counters_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance() : counters_;
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
return counters_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance() : counters_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getCounters());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getCounters());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto) obj;
if (hasCounters() != other.hasCounters()) return false;
if (hasCounters()) {
if (!getCounters()
.equals(other.getCounters())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCounters()) {
hash = (37 * hash) + COUNTERS_FIELD_NUMBER;
hash = (53 * hash) + getCounters().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
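// Editorial note (not generated code): the hash is memoized with 0 as the
// "not yet computed" sentinel; if a computed hash ever happened to equal 0
// it would simply be recomputed on each call, which stays correct.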
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetCountersResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetCountersResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getCountersFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
counters_ = null;
if (countersBuilder_ != null) {
countersBuilder_.dispose();
countersBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.counters_ = countersBuilder_ == null
? counters_
: countersBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance()) return this;
if (other.hasCounters()) {
mergeCounters(other.getCounters());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getCountersFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder> countersBuilder_;
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
* @return Whether the counters field is set.
*/
public boolean hasCounters() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
* @return The counters.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
if (countersBuilder_ == null) {
return counters_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance() : counters_;
} else {
return countersBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
public Builder setCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
if (countersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
counters_ = value;
} else {
countersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
public Builder setCounters(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder builderForValue) {
if (countersBuilder_ == null) {
counters_ = builderForValue.build();
} else {
countersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
public Builder mergeCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
if (countersBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
counters_ != null &&
counters_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance()) {
getCountersBuilder().mergeFrom(value);
} else {
counters_ = value;
}
} else {
countersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
public Builder clearCounters() {
bitField0_ = (bitField0_ & ~0x00000001);
counters_ = null;
if (countersBuilder_ != null) {
countersBuilder_.dispose();
countersBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder getCountersBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCountersFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
if (countersBuilder_ != null) {
return countersBuilder_.getMessageOrBuilder();
} else {
return counters_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance() : counters_;
}
}
/**
* optional .hadoop.mapreduce.CountersProto counters = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder>
getCountersFieldBuilder() {
if (countersBuilder_ == null) {
countersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder>(
getCounters(),
getParentForChildren(),
isClean());
counters_ = null;
}
return countersBuilder_;
}
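// getCountersFieldBuilder() above follows the generated lazy-initialization pattern: the
// SingleFieldBuilderV3 is only constructed on first access, takes over the current
// counters_ value, and counters_ is nulled so the builder becomes the single source of
// truth. A minimal sketch of exercising it (illustrative, not generated code):
//
//   GetCountersResponseProto.Builder b = GetCountersResponseProto.newBuilder();
//   b.getCountersBuilder();   // forces creation of the nested field builder
//   GetCountersResponseProto msg = b.build();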
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetCountersResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetCountersResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetCountersResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetCountersResponseProto>() {
@java.lang.Override
public GetCountersResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetCountersResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetCountersResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
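// Usage sketch for GetCountersResponseProto (illustrative only; CountersProto.getDefaultInstance()
// stands in for a real counters payload, and since every field is optional the message round-trips):
//
//   org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto resp =
//       org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.newBuilder()
//           .setCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance())
//           .build();
//   byte[] wire = resp.toByteArray();
//   org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parsed =
//       org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.parseFrom(wire);
//   assert parsed.hasCounters();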
public interface GetTaskAttemptCompletionEventsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
boolean hasJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
/**
* optional int32 from_event_id = 2;
* @return Whether the fromEventId field is set.
*/
boolean hasFromEventId();
/**
* optional int32 from_event_id = 2;
* @return The fromEventId.
*/
int getFromEventId();
/**
* optional int32 max_events = 3;
* @return Whether the maxEvents field is set.
*/
boolean hasMaxEvents();
/**
* optional int32 max_events = 3;
* @return The maxEvents.
*/
int getMaxEvents();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto}
*/
public static final class GetTaskAttemptCompletionEventsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto)
GetTaskAttemptCompletionEventsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskAttemptCompletionEventsRequestProto.newBuilder() to construct.
private GetTaskAttemptCompletionEventsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskAttemptCompletionEventsRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskAttemptCompletionEventsRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.Builder.class);
}
private int bitField0_;
public static final int JOB_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
@java.lang.Override
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
public static final int FROM_EVENT_ID_FIELD_NUMBER = 2;
private int fromEventId_ = 0;
/**
* optional int32 from_event_id = 2;
* @return Whether the fromEventId field is set.
*/
@java.lang.Override
public boolean hasFromEventId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int32 from_event_id = 2;
* @return The fromEventId.
*/
@java.lang.Override
public int getFromEventId() {
return fromEventId_;
}
public static final int MAX_EVENTS_FIELD_NUMBER = 3;
private int maxEvents_ = 0;
/**
* optional int32 max_events = 3;
* @return Whether the maxEvents field is set.
*/
@java.lang.Override
public boolean hasMaxEvents() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional int32 max_events = 3;
* @return The maxEvents.
*/
@java.lang.Override
public int getMaxEvents() {
return maxEvents_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getJobId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt32(2, fromEventId_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeInt32(3, maxEvents_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getJobId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(2, fromEventId_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeInt32Size(3, maxEvents_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto) obj;
if (hasJobId() != other.hasJobId()) return false;
if (hasJobId()) {
if (!getJobId()
.equals(other.getJobId())) return false;
}
if (hasFromEventId() != other.hasFromEventId()) return false;
if (hasFromEventId()) {
if (getFromEventId()
!= other.getFromEventId()) return false;
}
if (hasMaxEvents() != other.hasMaxEvents()) return false;
if (hasMaxEvents()) {
if (getMaxEvents()
!= other.getMaxEvents()) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasJobId()) {
hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
hash = (53 * hash) + getJobId().hashCode();
}
if (hasFromEventId()) {
hash = (37 * hash) + FROM_EVENT_ID_FIELD_NUMBER;
hash = (53 * hash) + getFromEventId();
}
if (hasMaxEvents()) {
hash = (37 * hash) + MAX_EVENTS_FIELD_NUMBER;
hash = (53 * hash) + getMaxEvents();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getJobIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
fromEventId_ = 0;
maxEvents_ = 0;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.jobId_ = jobIdBuilder_ == null
? jobId_
: jobIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.fromEventId_ = fromEventId_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.maxEvents_ = maxEvents_;
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
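// buildPartial0 copies the builder's presence bits into the message: bit 0x00000001 marks
// job_id, 0x00000002 from_event_id, and 0x00000004 max_events. The hasXxx() accessors test
// these same bits, which is how proto2 "optional" field presence is tracked.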
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.getDefaultInstance()) return this;
if (other.hasJobId()) {
mergeJobId(other.getJobId());
}
if (other.hasFromEventId()) {
setFromEventId(other.getFromEventId());
}
if (other.hasMaxEvents()) {
setMaxEvents(other.getMaxEvents());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getJobIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
fromEventId_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 24: {
maxEvents_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
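// The case labels in the parse loop above are wire-format tags, tag = (field_number << 3) | wire_type:
// job_id is field 1 as a length-delimited message, (1 << 3) | 2 = 10; from_event_id and
// max_events are varint fields 2 and 3, (2 << 3) | 0 = 16 and (3 << 3) | 0 = 24.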
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
if (jobIdBuilder_ == null) {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
} else {
return jobIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobId_ = value;
} else {
jobIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
if (jobIdBuilder_ == null) {
jobId_ = builderForValue.build();
} else {
jobIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
jobId_ != null &&
jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
getJobIdBuilder().mergeFrom(value);
} else {
jobId_ = value;
}
} else {
jobIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder clearJobId() {
bitField0_ = (bitField0_ & ~0x00000001);
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getJobIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
if (jobIdBuilder_ != null) {
return jobIdBuilder_.getMessageOrBuilder();
} else {
return jobId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
getJobIdFieldBuilder() {
if (jobIdBuilder_ == null) {
jobIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
getJobId(),
getParentForChildren(),
isClean());
jobId_ = null;
}
return jobIdBuilder_;
}
private int fromEventId_ ;
/**
* optional int32 from_event_id = 2;
* @return Whether the fromEventId field is set.
*/
@java.lang.Override
public boolean hasFromEventId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int32 from_event_id = 2;
* @return The fromEventId.
*/
@java.lang.Override
public int getFromEventId() {
return fromEventId_;
}
/**
* optional int32 from_event_id = 2;
* @param value The fromEventId to set.
* @return This builder for chaining.
*/
public Builder setFromEventId(int value) {
fromEventId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional int32 from_event_id = 2;
* @return This builder for chaining.
*/
public Builder clearFromEventId() {
bitField0_ = (bitField0_ & ~0x00000002);
fromEventId_ = 0;
onChanged();
return this;
}
private int maxEvents_ ;
/**
* optional int32 max_events = 3;
* @return Whether the maxEvents field is set.
*/
@java.lang.Override
public boolean hasMaxEvents() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional int32 max_events = 3;
* @return The maxEvents.
*/
@java.lang.Override
public int getMaxEvents() {
return maxEvents_;
}
/**
* optional int32 max_events = 3;
* @param value The maxEvents to set.
* @return This builder for chaining.
*/
public Builder setMaxEvents(int value) {
maxEvents_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* optional int32 max_events = 3;
* @return This builder for chaining.
*/
public Builder clearMaxEvents() {
bitField0_ = (bitField0_ & ~0x00000004);
maxEvents_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptCompletionEventsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskAttemptCompletionEventsRequestProto>() {
@java.lang.Override
public GetTaskAttemptCompletionEventsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptCompletionEventsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptCompletionEventsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
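// Usage sketch for a paging request (illustrative only; JobIdProto.getDefaultInstance() is a
// placeholder, and a real caller would supply the job id of interest):
//
//   org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto req =
//       org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.newBuilder()
//           .setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance())
//           .setFromEventId(0)    // first event index of the window
//           .setMaxEvents(100)    // page size
//           .build();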
public interface GetTaskAttemptCompletionEventsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto>
getCompletionEventsList();
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto getCompletionEvents(int index);
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
int getCompletionEventsCount();
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder>
getCompletionEventsOrBuilderList();
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder getCompletionEventsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto}
*/
public static final class GetTaskAttemptCompletionEventsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto)
GetTaskAttemptCompletionEventsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskAttemptCompletionEventsResponseProto.newBuilder() to construct.
private GetTaskAttemptCompletionEventsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskAttemptCompletionEventsResponseProto() {
completionEvents_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskAttemptCompletionEventsResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.Builder.class);
}
public static final int COMPLETION_EVENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> completionEvents_;
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> getCompletionEventsList() {
return completionEvents_;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder>
getCompletionEventsOrBuilderList() {
return completionEvents_;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
@java.lang.Override
public int getCompletionEventsCount() {
return completionEvents_.size();
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto getCompletionEvents(int index) {
return completionEvents_.get(index);
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder getCompletionEventsOrBuilder(
int index) {
return completionEvents_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < completionEvents_.size(); i++) {
output.writeMessage(1, completionEvents_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < completionEvents_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, completionEvents_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto) obj;
if (!getCompletionEventsList()
.equals(other.getCompletionEventsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getCompletionEventsCount() > 0) {
hash = (37 * hash) + COMPLETION_EVENTS_FIELD_NUMBER;
hash = (53 * hash) + getCompletionEventsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (completionEventsBuilder_ == null) {
completionEvents_ = java.util.Collections.emptyList();
} else {
completionEvents_ = null;
completionEventsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto result) {
if (completionEventsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
completionEvents_ = java.util.Collections.unmodifiableList(completionEvents_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.completionEvents_ = completionEvents_;
} else {
result.completionEvents_ = completionEventsBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance()) return this;
if (completionEventsBuilder_ == null) {
if (!other.completionEvents_.isEmpty()) {
if (completionEvents_.isEmpty()) {
completionEvents_ = other.completionEvents_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureCompletionEventsIsMutable();
completionEvents_.addAll(other.completionEvents_);
}
onChanged();
}
} else {
if (!other.completionEvents_.isEmpty()) {
if (completionEventsBuilder_.isEmpty()) {
completionEventsBuilder_.dispose();
completionEventsBuilder_ = null;
completionEvents_ = other.completionEvents_;
bitField0_ = (bitField0_ & ~0x00000001);
completionEventsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getCompletionEventsFieldBuilder() : null;
} else {
completionEventsBuilder_.addAllMessages(other.completionEvents_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto m =
input.readMessage(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.PARSER,
extensionRegistry);
if (completionEventsBuilder_ == null) {
ensureCompletionEventsIsMutable();
completionEvents_.add(m);
} else {
completionEventsBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> completionEvents_ =
java.util.Collections.emptyList();
private void ensureCompletionEventsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
completionEvents_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto>(completionEvents_);
bitField0_ |= 0x00000001;
}
}
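// ensureCompletionEventsIsMutable() gives the repeated field copy-on-write semantics: a
// builder created from an existing message may share that message's immutable list, and
// bit 0x00000001 records whether it has already been copied into a private ArrayList.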
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder> completionEventsBuilder_;
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> getCompletionEventsList() {
if (completionEventsBuilder_ == null) {
return java.util.Collections.unmodifiableList(completionEvents_);
} else {
return completionEventsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public int getCompletionEventsCount() {
if (completionEventsBuilder_ == null) {
return completionEvents_.size();
} else {
return completionEventsBuilder_.getCount();
}
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto getCompletionEvents(int index) {
if (completionEventsBuilder_ == null) {
return completionEvents_.get(index);
} else {
return completionEventsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder setCompletionEvents(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto value) {
if (completionEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCompletionEventsIsMutable();
completionEvents_.set(index, value);
onChanged();
} else {
completionEventsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder setCompletionEvents(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder builderForValue) {
if (completionEventsBuilder_ == null) {
ensureCompletionEventsIsMutable();
completionEvents_.set(index, builderForValue.build());
onChanged();
} else {
completionEventsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder addCompletionEvents(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto value) {
if (completionEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCompletionEventsIsMutable();
completionEvents_.add(value);
onChanged();
} else {
completionEventsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder addCompletionEvents(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto value) {
if (completionEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCompletionEventsIsMutable();
completionEvents_.add(index, value);
onChanged();
} else {
completionEventsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder addCompletionEvents(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder builderForValue) {
if (completionEventsBuilder_ == null) {
ensureCompletionEventsIsMutable();
completionEvents_.add(builderForValue.build());
onChanged();
} else {
completionEventsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder addCompletionEvents(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder builderForValue) {
if (completionEventsBuilder_ == null) {
ensureCompletionEventsIsMutable();
completionEvents_.add(index, builderForValue.build());
onChanged();
} else {
completionEventsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder addAllCompletionEvents(
java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> values) {
if (completionEventsBuilder_ == null) {
ensureCompletionEventsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, completionEvents_);
onChanged();
} else {
completionEventsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder clearCompletionEvents() {
if (completionEventsBuilder_ == null) {
completionEvents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
completionEventsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public Builder removeCompletionEvents(int index) {
if (completionEventsBuilder_ == null) {
ensureCompletionEventsIsMutable();
completionEvents_.remove(index);
onChanged();
} else {
completionEventsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder getCompletionEventsBuilder(
int index) {
return getCompletionEventsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder getCompletionEventsOrBuilder(
int index) {
if (completionEventsBuilder_ == null) {
return completionEvents_.get(index);
} else {
return completionEventsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder>
getCompletionEventsOrBuilderList() {
if (completionEventsBuilder_ != null) {
return completionEventsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(completionEvents_);
}
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder addCompletionEventsBuilder() {
return getCompletionEventsFieldBuilder().addBuilder(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.getDefaultInstance());
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder addCompletionEventsBuilder(
int index) {
return getCompletionEventsFieldBuilder().addBuilder(
index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.getDefaultInstance());
}
/**
* repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
*/
public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder>
getCompletionEventsBuilderList() {
return getCompletionEventsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder>
getCompletionEventsFieldBuilder() {
if (completionEventsBuilder_ == null) {
completionEventsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder>(
completionEvents_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
completionEvents_ = null;
}
return completionEventsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptCompletionEventsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskAttemptCompletionEventsResponseProto>() {
@java.lang.Override
public GetTaskAttemptCompletionEventsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptCompletionEventsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskAttemptCompletionEventsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
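// ---------------------------------------------------------------------------
// Editorial usage sketch (not part of the generated file). It shows how the
// response message above is typically read back; the variable name "in" and
// the surrounding context are assumptions, while parseFrom(...) and the
// repeated-field accessor getCompletionEventsList() are the generated API.
//
//   java.io.InputStream in = ...; // assumed to carry a serialized response
//   GetTaskAttemptCompletionEventsResponseProto resp =
//       GetTaskAttemptCompletionEventsResponseProto.parseFrom(in);
//   for (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto e
//       : resp.getCompletionEventsList()) {
//     // inspect each task-attempt completion event here
//   }
// ---------------------------------------------------------------------------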
public interface GetTaskReportsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskReportsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
boolean hasJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @return Whether the taskType field is set.
*/
boolean hasTaskType();
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @return The taskType.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportsRequestProto}
*/
public static final class GetTaskReportsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskReportsRequestProto)
GetTaskReportsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskReportsRequestProto.newBuilder() to construct.
private GetTaskReportsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskReportsRequestProto() {
taskType_ = 1;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskReportsRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.Builder.class);
}
private int bitField0_;
public static final int JOB_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
@java.lang.Override
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
public static final int TASK_TYPE_FIELD_NUMBER = 2;
private int taskType_ = 1;
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @return Whether the taskType field is set.
*/
@java.lang.Override public boolean hasTaskType() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @return The taskType.
*/
@java.lang.Override public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType() {
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto result = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.forNumber(taskType_);
return result == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getJobId());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, taskType_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getJobId());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, taskType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto) obj;
if (hasJobId() != other.hasJobId()) return false;
if (hasJobId()) {
if (!getJobId()
.equals(other.getJobId())) return false;
}
if (hasTaskType() != other.hasTaskType()) return false;
if (hasTaskType()) {
if (taskType_ != other.taskType_) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasJobId()) {
hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
hash = (53 * hash) + getJobId().hashCode();
}
if (hasTaskType()) {
hash = (37 * hash) + TASK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + taskType_;
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskReportsRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getJobIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
taskType_ = 1;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.jobId_ = jobIdBuilder_ == null
? jobId_
: jobIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.taskType_ = taskType_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.getDefaultInstance()) return this;
if (other.hasJobId()) {
mergeJobId(other.getJobId());
}
if (other.hasTaskType()) {
setTaskType(other.getTaskType());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getJobIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16: {
int tmpRaw = input.readEnum();
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto tmpValue =
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.forNumber(tmpRaw);
if (tmpValue == null) {
mergeUnknownVarintField(2, tmpRaw);
} else {
taskType_ = tmpRaw;
bitField0_ |= 0x00000002;
}
break;
} // case 16
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
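// Editorial note on the switch above (not generated): protobuf wire tags are
// (field_number << 3) | wire_type, so case 10 is field 1 as a
// length-delimited message ((1 << 3) | 2) and case 16 is field 2 as a
// varint enum ((2 << 3) | 0). An enum number that forNumber(...) does not
// recognize is preserved as an unknown varint field rather than dropped,
// per the proto2 optional-enum rule.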
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
if (jobIdBuilder_ == null) {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
} else {
return jobIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobId_ = value;
} else {
jobIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
if (jobIdBuilder_ == null) {
jobId_ = builderForValue.build();
} else {
jobIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
jobId_ != null &&
jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
getJobIdBuilder().mergeFrom(value);
} else {
jobId_ = value;
}
} else {
jobIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder clearJobId() {
bitField0_ = (bitField0_ & ~0x00000001);
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getJobIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
if (jobIdBuilder_ != null) {
return jobIdBuilder_.getMessageOrBuilder();
} else {
return jobId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
getJobIdFieldBuilder() {
if (jobIdBuilder_ == null) {
jobIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
getJobId(),
getParentForChildren(),
isClean());
jobId_ = null;
}
return jobIdBuilder_;
}
private int taskType_ = 1;
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @return Whether the taskType field is set.
*/
@java.lang.Override public boolean hasTaskType() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @return The taskType.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType() {
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto result = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.forNumber(taskType_);
return result == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP : result;
}
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @param value The taskType to set.
* @return This builder for chaining.
*/
public Builder setTaskType(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
taskType_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
* @return This builder for chaining.
*/
public Builder clearTaskType() {
bitField0_ = (bitField0_ & ~0x00000002);
taskType_ = 1;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportsRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskReportsRequestProto>() {
@java.lang.Override
public GetTaskReportsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
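// ---------------------------------------------------------------------------
// Editorial usage sketch (not generated code): building the request above via
// its Builder. The JobIdProto value is left as the default instance purely
// for illustration; a caller would normally populate a real job id.
//
//   GetTaskReportsRequestProto request =
//       GetTaskReportsRequestProto.newBuilder()
//           .setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto
//               .getDefaultInstance())
//           .setTaskType(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP)
//           .build();
// ---------------------------------------------------------------------------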
public interface GetTaskReportsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetTaskReportsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto>
getTaskReportsList();
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReports(int index);
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
int getTaskReportsCount();
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
getTaskReportsOrBuilderList();
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportsResponseProto}
*/
public static final class GetTaskReportsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetTaskReportsResponseProto)
GetTaskReportsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetTaskReportsResponseProto.newBuilder() to construct.
private GetTaskReportsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTaskReportsResponseProto() {
taskReports_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetTaskReportsResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.Builder.class);
}
public static final int TASK_REPORTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> taskReports_;
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> getTaskReportsList() {
return taskReports_;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
getTaskReportsOrBuilderList() {
return taskReports_;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
@java.lang.Override
public int getTaskReportsCount() {
return taskReports_.size();
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReports(int index) {
return taskReports_.get(index);
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportsOrBuilder(
int index) {
return taskReports_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < taskReports_.size(); i++) {
output.writeMessage(1, taskReports_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < taskReports_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, taskReports_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto) obj;
if (!getTaskReportsList()
.equals(other.getTaskReportsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getTaskReportsCount() > 0) {
hash = (37 * hash) + TASK_REPORTS_FIELD_NUMBER;
hash = (53 * hash) + getTaskReportsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetTaskReportsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetTaskReportsResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (taskReportsBuilder_ == null) {
taskReports_ = java.util.Collections.emptyList();
} else {
taskReports_ = null;
taskReportsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto result) {
if (taskReportsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
taskReports_ = java.util.Collections.unmodifiableList(taskReports_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.taskReports_ = taskReports_;
} else {
result.taskReports_ = taskReportsBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance()) return this;
if (taskReportsBuilder_ == null) {
if (!other.taskReports_.isEmpty()) {
if (taskReports_.isEmpty()) {
taskReports_ = other.taskReports_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTaskReportsIsMutable();
taskReports_.addAll(other.taskReports_);
}
onChanged();
}
} else {
if (!other.taskReports_.isEmpty()) {
if (taskReportsBuilder_.isEmpty()) {
taskReportsBuilder_.dispose();
taskReportsBuilder_ = null;
taskReports_ = other.taskReports_;
bitField0_ = (bitField0_ & ~0x00000001);
taskReportsBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getTaskReportsFieldBuilder() : null;
} else {
taskReportsBuilder_.addAllMessages(other.taskReports_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto m =
input.readMessage(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.PARSER,
extensionRegistry);
if (taskReportsBuilder_ == null) {
ensureTaskReportsIsMutable();
taskReports_.add(m);
} else {
taskReportsBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> taskReports_ =
java.util.Collections.emptyList();
private void ensureTaskReportsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
taskReports_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto>(taskReports_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder> taskReportsBuilder_;
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> getTaskReportsList() {
if (taskReportsBuilder_ == null) {
return java.util.Collections.unmodifiableList(taskReports_);
} else {
return taskReportsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public int getTaskReportsCount() {
if (taskReportsBuilder_ == null) {
return taskReports_.size();
} else {
return taskReportsBuilder_.getCount();
}
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReports(int index) {
if (taskReportsBuilder_ == null) {
return taskReports_.get(index);
} else {
return taskReportsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder setTaskReports(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
if (taskReportsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTaskReportsIsMutable();
taskReports_.set(index, value);
onChanged();
} else {
taskReportsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder setTaskReports(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
if (taskReportsBuilder_ == null) {
ensureTaskReportsIsMutable();
taskReports_.set(index, builderForValue.build());
onChanged();
} else {
taskReportsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder addTaskReports(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
if (taskReportsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTaskReportsIsMutable();
taskReports_.add(value);
onChanged();
} else {
taskReportsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder addTaskReports(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
if (taskReportsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTaskReportsIsMutable();
taskReports_.add(index, value);
onChanged();
} else {
taskReportsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder addTaskReports(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
if (taskReportsBuilder_ == null) {
ensureTaskReportsIsMutable();
taskReports_.add(builderForValue.build());
onChanged();
} else {
taskReportsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder addTaskReports(
int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
if (taskReportsBuilder_ == null) {
ensureTaskReportsIsMutable();
taskReports_.add(index, builderForValue.build());
onChanged();
} else {
taskReportsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder addAllTaskReports(
java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> values) {
if (taskReportsBuilder_ == null) {
ensureTaskReportsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, taskReports_);
onChanged();
} else {
taskReportsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder clearTaskReports() {
if (taskReportsBuilder_ == null) {
taskReports_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
taskReportsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public Builder removeTaskReports(int index) {
if (taskReportsBuilder_ == null) {
ensureTaskReportsIsMutable();
taskReports_.remove(index);
onChanged();
} else {
taskReportsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder getTaskReportsBuilder(
int index) {
return getTaskReportsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportsOrBuilder(
int index) {
if (taskReportsBuilder_ == null) {
return taskReports_.get(index); } else {
return taskReportsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
getTaskReportsOrBuilderList() {
if (taskReportsBuilder_ != null) {
return taskReportsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(taskReports_);
}
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder addTaskReportsBuilder() {
return getTaskReportsFieldBuilder().addBuilder(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance());
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder addTaskReportsBuilder(
int index) {
return getTaskReportsFieldBuilder().addBuilder(
index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance());
}
/**
* repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
*/
public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder>
getTaskReportsBuilderList() {
return getTaskReportsFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
getTaskReportsFieldBuilder() {
if (taskReportsBuilder_ == null) {
taskReportsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>(
taskReports_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
taskReports_ = null;
}
return taskReportsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportsResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetTaskReportsResponseProto>() {
@java.lang.Override
public GetTaskReportsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetTaskReportsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
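// ---------------------------------------------------------------------------
// Editorial usage sketch (not generated code): a serialize/parse round trip
// for the response above. toByteArray() comes from the protobuf MessageLite
// base class; the report value is a default instance used only for
// illustration.
//
//   GetTaskReportsResponseProto original =
//       GetTaskReportsResponseProto.newBuilder()
//           .addTaskReports(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto
//               .getDefaultInstance())
//           .build();
//   byte[] bytes = original.toByteArray();
//   GetTaskReportsResponseProto copy = GetTaskReportsResponseProto.parseFrom(bytes);
//   assert copy.getTaskReportsCount() == 1;
// ---------------------------------------------------------------------------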
public interface GetDiagnosticsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetDiagnosticsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
boolean hasTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.GetDiagnosticsRequestProto}
*/
public static final class GetDiagnosticsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetDiagnosticsRequestProto)
GetDiagnosticsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetDiagnosticsRequestProto.newBuilder() to construct.
private GetDiagnosticsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetDiagnosticsRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetDiagnosticsRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.Builder.class);
}
private int bitField0_;
public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
@java.lang.Override
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskAttemptId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskAttemptId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto) obj;
if (hasTaskAttemptId() != other.hasTaskAttemptId()) return false;
if (hasTaskAttemptId()) {
if (!getTaskAttemptId()
.equals(other.getTaskAttemptId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskAttemptId()) {
hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
hash = (53 * hash) + getTaskAttemptId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetDiagnosticsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetDiagnosticsRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskAttemptIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskAttemptId_ = taskAttemptIdBuilder_ == null
? taskAttemptId_
: taskAttemptIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.getDefaultInstance()) return this;
if (other.hasTaskAttemptId()) {
mergeTaskAttemptId(other.getTaskAttemptId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTaskAttemptIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
if (taskAttemptIdBuilder_ == null) {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
} else {
return taskAttemptIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskAttemptId_ = value;
} else {
taskAttemptIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
if (taskAttemptIdBuilder_ == null) {
taskAttemptId_ = builderForValue.build();
} else {
taskAttemptIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskAttemptId_ != null &&
taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
getTaskAttemptIdBuilder().mergeFrom(value);
} else {
taskAttemptId_ = value;
}
} else {
taskAttemptIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
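// Note: when no field builder exists and a non-default task_attempt_id is
// already set, mergeTaskAttemptId() merges the incoming value field by
// field; otherwise the incoming value simply replaces the current one.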
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder clearTaskAttemptId() {
bitField0_ = (bitField0_ & ~0x00000001);
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskAttemptIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
if (taskAttemptIdBuilder_ != null) {
return taskAttemptIdBuilder_.getMessageOrBuilder();
} else {
return taskAttemptId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>
getTaskAttemptIdFieldBuilder() {
if (taskAttemptIdBuilder_ == null) {
taskAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
getTaskAttemptId(),
getParentForChildren(),
isClean());
taskAttemptId_ = null;
}
return taskAttemptIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetDiagnosticsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetDiagnosticsRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetDiagnosticsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetDiagnosticsRequestProto>() {
@java.lang.Override
public GetDiagnosticsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetDiagnosticsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetDiagnosticsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
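// Usage sketch (illustrative; `attemptId` is a TaskAttemptIdProto assumed
// to have been built elsewhere). The builder writes through the optional
// task_attempt_id field tracked by bitField0_:
//
//   GetDiagnosticsRequestProto request =
//       GetDiagnosticsRequestProto.newBuilder()
//           .setTaskAttemptId(attemptId)
//           .build();
//   byte[] wire = request.toByteArray();  // emits field 1 only if hasTaskAttemptId()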
public interface GetDiagnosticsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.GetDiagnosticsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string diagnostics = 1;
* @return A list containing the diagnostics.
*/
java.util.List<java.lang.String>
getDiagnosticsList();
/**
* repeated string diagnostics = 1;
* @return The count of diagnostics.
*/
int getDiagnosticsCount();
/**
* repeated string diagnostics = 1;
* @param index The index of the element to return.
* @return The diagnostics at the given index.
*/
java.lang.String getDiagnostics(int index);
/**
* repeated string diagnostics = 1;
* @param index The index of the value to return.
* @return The bytes of the diagnostics at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes(int index);
}
/**
* Protobuf type {@code hadoop.mapreduce.GetDiagnosticsResponseProto}
*/
public static final class GetDiagnosticsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.GetDiagnosticsResponseProto)
GetDiagnosticsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetDiagnosticsResponseProto.newBuilder() to construct.
private GetDiagnosticsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetDiagnosticsResponseProto() {
diagnostics_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetDiagnosticsResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.Builder.class);
}
public static final int DIAGNOSTICS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList diagnostics_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
/**
* repeated string diagnostics = 1;
* @return A list containing the diagnostics.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getDiagnosticsList() {
return diagnostics_;
}
/**
* repeated string diagnostics = 1;
* @return The count of diagnostics.
*/
public int getDiagnosticsCount() {
return diagnostics_.size();
}
/**
* repeated string diagnostics = 1;
* @param index The index of the element to return.
* @return The diagnostics at the given index.
*/
public java.lang.String getDiagnostics(int index) {
return diagnostics_.get(index);
}
/**
* repeated string diagnostics = 1;
* @param index The index of the value to return.
* @return The bytes of the diagnostics at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes(int index) {
return diagnostics_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < diagnostics_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, diagnostics_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < diagnostics_.size(); i++) {
dataSize += computeStringSizeNoTag(diagnostics_.getRaw(i));
}
size += dataSize;
size += 1 * getDiagnosticsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto) obj;
if (!getDiagnosticsList()
.equals(other.getDiagnosticsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDiagnosticsCount() > 0) {
hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
hash = (53 * hash) + getDiagnosticsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.GetDiagnosticsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.GetDiagnosticsResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
diagnostics_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
diagnostics_.makeImmutable();
result.diagnostics_ = diagnostics_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance()) return this;
if (!other.diagnostics_.isEmpty()) {
if (diagnostics_.isEmpty()) {
diagnostics_ = other.diagnostics_;
bitField0_ |= 0x00000001;
} else {
ensureDiagnosticsIsMutable();
diagnostics_.addAll(other.diagnostics_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureDiagnosticsIsMutable();
diagnostics_.add(bs);
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList diagnostics_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
private void ensureDiagnosticsIsMutable() {
if (!diagnostics_.isModifiable()) {
diagnostics_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(diagnostics_);
}
bitField0_ |= 0x00000001;
}
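// ensureDiagnosticsIsMutable() copies the shared LazyStringArrayList on
// first mutation, so a list adopted directly from another message in
// mergeFrom() is never modified in place.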
/**
* repeated string diagnostics = 1;
* @return A list containing the diagnostics.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getDiagnosticsList() {
diagnostics_.makeImmutable();
return diagnostics_;
}
/**
* repeated string diagnostics = 1;
* @return The count of diagnostics.
*/
public int getDiagnosticsCount() {
return diagnostics_.size();
}
/**
* repeated string diagnostics = 1;
* @param index The index of the element to return.
* @return The diagnostics at the given index.
*/
public java.lang.String getDiagnostics(int index) {
return diagnostics_.get(index);
}
/**
* repeated string diagnostics = 1;
* @param index The index of the value to return.
* @return The bytes of the diagnostics at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getDiagnosticsBytes(int index) {
return diagnostics_.getByteString(index);
}
/**
* repeated string diagnostics = 1;
* @param index The index to set the value at.
* @param value The diagnostics to set.
* @return This builder for chaining.
*/
public Builder setDiagnostics(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureDiagnosticsIsMutable();
diagnostics_.set(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string diagnostics = 1;
* @param value The diagnostics to add.
* @return This builder for chaining.
*/
public Builder addDiagnostics(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureDiagnosticsIsMutable();
diagnostics_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string diagnostics = 1;
* @param values The diagnostics to add.
* @return This builder for chaining.
*/
public Builder addAllDiagnostics(
java.lang.Iterable<java.lang.String> values) {
ensureDiagnosticsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, diagnostics_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string diagnostics = 1;
* @return This builder for chaining.
*/
public Builder clearDiagnostics() {
diagnostics_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* repeated string diagnostics = 1;
* @param value The bytes of the diagnostics to add.
* @return This builder for chaining.
*/
public Builder addDiagnosticsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureDiagnosticsIsMutable();
diagnostics_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetDiagnosticsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetDiagnosticsResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetDiagnosticsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetDiagnosticsResponseProto>() {
@java.lang.Override
public GetDiagnosticsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetDiagnosticsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetDiagnosticsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
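// Usage sketch (illustrative; `in` is a hypothetical java.io.InputStream
// carrying one serialized GetDiagnosticsResponseProto). The repeated
// diagnostics field surfaces as an immutable ProtocolStringList:
//
//   GetDiagnosticsResponseProto response =
//       GetDiagnosticsResponseProto.parseFrom(in);
//   for (java.lang.String line : response.getDiagnosticsList()) {
//     System.out.println(line);
//   }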
public interface KillJobRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.KillJobRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
boolean hasJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
}
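// Usage sketch (illustrative; `jobId` is a JobIdProto assumed built
// elsewhere) for the message defined below:
//
//   KillJobRequestProto kill = KillJobRequestProto.newBuilder()
//       .setJobId(jobId)
//       .build();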
/**
* Protobuf type {@code hadoop.mapreduce.KillJobRequestProto}
*/
public static final class KillJobRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.KillJobRequestProto)
KillJobRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use KillJobRequestProto.newBuilder() to construct.
private KillJobRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private KillJobRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new KillJobRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.Builder.class);
}
private int bitField0_;
public static final int JOB_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
@java.lang.Override
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getJobId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getJobId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto) obj;
if (hasJobId() != other.hasJobId()) return false;
if (hasJobId()) {
if (!getJobId()
.equals(other.getJobId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasJobId()) {
hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
hash = (53 * hash) + getJobId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.KillJobRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.KillJobRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getJobIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.jobId_ = jobIdBuilder_ == null
? jobId_
: jobIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.getDefaultInstance()) return this;
if (other.hasJobId()) {
mergeJobId(other.getJobId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getJobIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return Whether the jobId field is set.
*/
public boolean hasJobId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
* @return The jobId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
if (jobIdBuilder_ == null) {
return jobId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
} else {
return jobIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobId_ = value;
} else {
jobIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder setJobId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
if (jobIdBuilder_ == null) {
jobId_ = builderForValue.build();
} else {
jobIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
if (jobIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
jobId_ != null &&
jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
getJobIdBuilder().mergeFrom(value);
} else {
jobId_ = value;
}
} else {
jobIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public Builder clearJobId() {
bitField0_ = (bitField0_ & ~0x00000001);
jobId_ = null;
if (jobIdBuilder_ != null) {
jobIdBuilder_.dispose();
jobIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getJobIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
if (jobIdBuilder_ != null) {
return jobIdBuilder_.getMessageOrBuilder();
} else {
return jobId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance() : jobId_;
}
}
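// Note (added for documentation): jobIdBuilder_ is created lazily. Until
// getJobIdFieldBuilder() is first called, the builder stores a plain jobId_
// message; afterwards the SingleFieldBuilderV3 owns the value and jobId_ is
// nulled out (see the constructor call below).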
/**
* optional .hadoop.mapreduce.JobIdProto job_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
getJobIdFieldBuilder() {
if (jobIdBuilder_ == null) {
jobIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
getJobId(),
getParentForChildren(),
isClean());
jobId_ = null;
}
return jobIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillJobRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillJobRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
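// Note (added for documentation): PARSER is deprecated but kept public for
// older callers; the parser() and getParserForType() accessors below return
// the same instance.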
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillJobRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillJobRequestProto>() {
@java.lang.Override
public KillJobRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<KillJobRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<KillJobRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
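// --- Illustrative usage sketch (not generated code; added for documentation) ---
// A minimal example of the builder round-trip for KillJobRequestProto, assuming
// the caller already holds a MRProtos.JobIdProto. The helper name is ours; the
// newBuilder()/setJobId()/build()/hasJobId() calls are the generated API above.
private static KillJobRequestProto exampleBuildKillJobRequest(
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId) {
  KillJobRequestProto request = KillJobRequestProto.newBuilder()
      .setJobId(jobId)       // sets bit 0x00000001 and stores the message
      .build();              // job_id is optional, so build() cannot fail here
  assert request.hasJobId(); // the has-bit is carried into the built message
  return request;
}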
public interface KillJobResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.KillJobResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.mapreduce.KillJobResponseProto}
*/
public static final class KillJobResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.KillJobResponseProto)
KillJobResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use KillJobResponseProto.newBuilder() to construct.
private KillJobResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private KillJobResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new KillJobResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.KillJobResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.KillJobResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillJobResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillJobResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillJobResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillJobResponseProto>() {
@java.lang.Override
public KillJobResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<KillJobResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<KillJobResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
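// --- Illustrative usage sketch (not generated code; added for documentation) ---
// KillJobResponseProto declares no fields, so a well-formed response is usually
// zero bytes on the wire; unrecognized tags are preserved in the message's
// UnknownFieldSet rather than rejected. The helper name is ours; the
// parseFrom(byte[]) overload it calls is defined above.
private static KillJobResponseProto exampleDecodeKillJobResponse(byte[] wire)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return KillJobResponseProto.parseFrom(wire);
}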
public interface KillTaskRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.KillTaskRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return Whether the taskId field is set.
*/
boolean hasTaskId();
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return The taskId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId();
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskRequestProto}
*/
public static final class KillTaskRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.KillTaskRequestProto)
KillTaskRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use KillTaskRequestProto.newBuilder() to construct.
private KillTaskRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private KillTaskRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new KillTaskRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.Builder.class);
}
private int bitField0_;
public static final int TASK_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return Whether the taskId field is set.
*/
@java.lang.Override
public boolean hasTaskId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return The taskId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
return taskId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
return taskId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto) obj;
if (hasTaskId() != other.hasTaskId()) return false;
if (hasTaskId()) {
if (!getTaskId()
.equals(other.getTaskId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskId()) {
hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
hash = (53 * hash) + getTaskId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.KillTaskRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
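// Note (added for documentation): alwaysUseFieldBuilders is a GeneratedMessageV3
// hook that is only enabled in protobuf's own tests; in normal runs it is false,
// so taskIdBuilder_ is created lazily on first access rather than here.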
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskId_ = null;
if (taskIdBuilder_ != null) {
taskIdBuilder_.dispose();
taskIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskId_ = taskIdBuilder_ == null
? taskId_
: taskIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.getDefaultInstance()) return this;
if (other.hasTaskId()) {
mergeTaskId(other.getTaskId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTaskIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> taskIdBuilder_;
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return Whether the taskId field is set.
*/
public boolean hasTaskId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
* @return The taskId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
if (taskIdBuilder_ == null) {
return taskId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
} else {
return taskIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder setTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
if (taskIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskId_ = value;
} else {
taskIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder setTaskId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder builderForValue) {
if (taskIdBuilder_ == null) {
taskId_ = builderForValue.build();
} else {
taskIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder mergeTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
if (taskIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskId_ != null &&
taskId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
getTaskIdBuilder().mergeFrom(value);
} else {
taskId_ = value;
}
} else {
taskIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public Builder clearTaskId() {
bitField0_ = (bitField0_ & ~0x00000001);
taskId_ = null;
if (taskIdBuilder_ != null) {
taskIdBuilder_.dispose();
taskIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder getTaskIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
if (taskIdBuilder_ != null) {
return taskIdBuilder_.getMessageOrBuilder();
} else {
return taskId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance() : taskId_;
}
}
/**
* optional .hadoop.mapreduce.TaskIdProto task_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>
getTaskIdFieldBuilder() {
if (taskIdBuilder_ == null) {
taskIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>(
getTaskId(),
getParentForChildren(),
isClean());
taskId_ = null;
}
return taskIdBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillTaskRequestProto>() {
@java.lang.Override
public KillTaskRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
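// --- Illustrative usage sketch (not generated code; added for documentation) ---
// Merge semantics for KillTaskRequestProto: mergeFrom(other) touches task_id only
// when other.hasTaskId() is true (see the generated mergeFrom above), so merging
// a default instance leaves the base request unchanged. The helper name is ours.
private static KillTaskRequestProto exampleMergeKillTaskRequests(
    KillTaskRequestProto base, KillTaskRequestProto overlay) {
  return base.toBuilder()   // builder pre-populated from base
      .mergeFrom(overlay)   // merges overlay's task_id into base's when set
      .build();
}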
public interface KillTaskResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.KillTaskResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskResponseProto}
*/
public static final class KillTaskResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.KillTaskResponseProto)
KillTaskResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use KillTaskResponseProto.newBuilder() to construct.
private KillTaskResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private KillTaskResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new KillTaskResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.KillTaskResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillTaskResponseProto>() {
@java.lang.Override
public KillTaskResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
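// --- Illustrative usage sketch (not generated code; added for documentation) ---
// The static parser() accessor is the supported route to the PARSER instance;
// this sketch decodes a response through it. The helper name is ours;
// Parser.parseFrom(byte[]) is part of the protobuf runtime.
private static KillTaskResponseProto exampleParseKillTaskResponse(byte[] wire)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return KillTaskResponseProto.parser().parseFrom(wire);
}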
public interface KillTaskAttemptRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.KillTaskAttemptRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
boolean hasTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskAttemptRequestProto}
*/
public static final class KillTaskAttemptRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.KillTaskAttemptRequestProto)
KillTaskAttemptRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use KillTaskAttemptRequestProto.newBuilder() to construct.
private KillTaskAttemptRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private KillTaskAttemptRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new KillTaskAttemptRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.Builder.class);
}
private int bitField0_;
public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
@java.lang.Override
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
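// memoizedIsInitialized caches the isInitialized() result: -1 = not yet
// computed, 1 = initialized, 0 = not initialized. Every field of this message
// is optional, so the check below always succeeds.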
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskAttemptId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskAttemptId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto) obj;
if (hasTaskAttemptId() != other.hasTaskAttemptId()) return false;
if (hasTaskAttemptId()) {
if (!getTaskAttemptId()
.equals(other.getTaskAttemptId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskAttemptId()) {
hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
hash = (53 * hash) + getTaskAttemptId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskAttemptRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.KillTaskAttemptRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskAttemptIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskAttemptId_ = taskAttemptIdBuilder_ == null
? taskAttemptId_
: taskAttemptIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.getDefaultInstance()) return this;
if (other.hasTaskAttemptId()) {
mergeTaskAttemptId(other.getTaskAttemptId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
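// tag 10 == (field number 1 << 3) | wire type 2 (length-delimited):
// the serialized task_attempt_id sub-message follows.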
case 10: {
input.readMessage(
getTaskAttemptIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
if (taskAttemptIdBuilder_ == null) {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
} else {
return taskAttemptIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskAttemptId_ = value;
} else {
taskAttemptIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
if (taskAttemptIdBuilder_ == null) {
taskAttemptId_ = builderForValue.build();
} else {
taskAttemptIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskAttemptId_ != null &&
taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
getTaskAttemptIdBuilder().mergeFrom(value);
} else {
taskAttemptId_ = value;
}
} else {
taskAttemptIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder clearTaskAttemptId() {
bitField0_ = (bitField0_ & ~0x00000001);
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskAttemptIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
if (taskAttemptIdBuilder_ != null) {
return taskAttemptIdBuilder_.getMessageOrBuilder();
} else {
return taskAttemptId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>
getTaskAttemptIdFieldBuilder() {
if (taskAttemptIdBuilder_ == null) {
taskAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
getTaskAttemptId(),
getParentForChildren(),
isClean());
taskAttemptId_ = null;
}
return taskAttemptIdBuilder_;
}
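// Editor's note: once the SingleFieldBuilderV3 above exists, it becomes the
// single source of truth for task_attempt_id; the plain taskAttemptId_ field
// is nulled and all subsequent reads and writes go through the field builder.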
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskAttemptRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskAttemptRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskAttemptRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillTaskAttemptRequestProto>() {
@java.lang.Override
public KillTaskAttemptRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskAttemptRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskAttemptRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
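// Usage sketch (illustrative editor's example, not generated code): building
// and parsing a kill-task-attempt request. `attemptId` stands in for an
// MRProtos.TaskAttemptIdProto obtained from the surrounding application.
//
//   KillTaskAttemptRequestProto req = KillTaskAttemptRequestProto.newBuilder()
//       .setTaskAttemptId(attemptId)      // sets bit 0x00000001 of bitField0_
//       .build();
//   KillTaskAttemptRequestProto back =
//       KillTaskAttemptRequestProto.parseFrom(req.toByteArray());
//   assert back.hasTaskAttemptId();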
public interface KillTaskAttemptResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.KillTaskAttemptResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskAttemptResponseProto}
*/
public static final class KillTaskAttemptResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.KillTaskAttemptResponseProto)
KillTaskAttemptResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use KillTaskAttemptResponseProto.newBuilder() to construct.
private KillTaskAttemptResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private KillTaskAttemptResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new KillTaskAttemptResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.KillTaskAttemptResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.KillTaskAttemptResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskAttemptResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskAttemptResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskAttemptResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<KillTaskAttemptResponseProto>() {
@java.lang.Override
public KillTaskAttemptResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskAttemptResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<KillTaskAttemptResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
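// Usage sketch (illustrative): although KillTaskAttemptResponseProto declares
// no fields, fields added by a newer peer survive a round trip through
// getUnknownFields(). `bytesFromNewerPeer` is a hypothetical byte[] input.
//
//   KillTaskAttemptResponseProto resp =
//       KillTaskAttemptResponseProto.parseFrom(bytesFromNewerPeer);
//   byte[] echoed = resp.toByteArray();   // unknown fields re-emitted verbatim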
public interface FailTaskAttemptRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.FailTaskAttemptRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
boolean hasTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
}
/**
* Protobuf type {@code hadoop.mapreduce.FailTaskAttemptRequestProto}
*/
public static final class FailTaskAttemptRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.FailTaskAttemptRequestProto)
FailTaskAttemptRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use FailTaskAttemptRequestProto.newBuilder() to construct.
private FailTaskAttemptRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private FailTaskAttemptRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new FailTaskAttemptRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.Builder.class);
}
private int bitField0_;
public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
@java.lang.Override
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTaskAttemptId());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getTaskAttemptId());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto) obj;
if (hasTaskAttemptId() != other.hasTaskAttemptId()) return false;
if (hasTaskAttemptId()) {
if (!getTaskAttemptId()
.equals(other.getTaskAttemptId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTaskAttemptId()) {
hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
hash = (53 * hash) + getTaskAttemptId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.FailTaskAttemptRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.FailTaskAttemptRequestProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTaskAttemptIdFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.taskAttemptId_ = taskAttemptIdBuilder_ == null
? taskAttemptId_
: taskAttemptIdBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.getDefaultInstance()) return this;
if (other.hasTaskAttemptId()) {
mergeTaskAttemptId(other.getTaskAttemptId());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTaskAttemptIdFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return Whether the taskAttemptId field is set.
*/
public boolean hasTaskAttemptId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
* @return The taskAttemptId.
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
if (taskAttemptIdBuilder_ == null) {
return taskAttemptId_ == null ? org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
} else {
return taskAttemptIdBuilder_.getMessage();
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
taskAttemptId_ = value;
} else {
taskAttemptIdBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder setTaskAttemptId(
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
if (taskAttemptIdBuilder_ == null) {
taskAttemptId_ = builderForValue.build();
} else {
taskAttemptIdBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
if (taskAttemptIdBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
taskAttemptId_ != null &&
taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
getTaskAttemptIdBuilder().mergeFrom(value);
} else {
taskAttemptId_ = value;
}
} else {
taskAttemptIdBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public Builder clearTaskAttemptId() {
bitField0_ = (bitField0_ & ~0x00000001);
taskAttemptId_ = null;
if (taskAttemptIdBuilder_ != null) {
taskAttemptIdBuilder_.dispose();
taskAttemptIdBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTaskAttemptIdFieldBuilder().getBuilder();
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
if (taskAttemptIdBuilder_ != null) {
return taskAttemptIdBuilder_.getMessageOrBuilder();
} else {
return taskAttemptId_ == null ?
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance() : taskAttemptId_;
}
}
/**
* optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>
getTaskAttemptIdFieldBuilder() {
if (taskAttemptIdBuilder_ == null) {
taskAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
getTaskAttemptId(),
getParentForChildren(),
isClean());
taskAttemptId_ = null;
}
return taskAttemptIdBuilder_;
}
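// Editorial note: the task_attempt_id accessors above follow the standard
// protobuf-java single-message-field pattern: the value lives either in the
// plain field (taskAttemptId_) or, once a nested builder is requested, in a
// lazily created SingleFieldBuilderV3 (taskAttemptIdBuilder_); only one of
// the two is live at a time. A minimal usage sketch -- the concrete values
// are illustrative assumptions, not taken from this file:
//
//   MRServiceProtos.FailTaskAttemptRequestProto request =
//       MRServiceProtos.FailTaskAttemptRequestProto.newBuilder()
//           .setTaskAttemptId(attemptId)          // plain-message path
//           .build();
//
//   // or edit the nested message in place, which forces the field builder:
//   MRServiceProtos.FailTaskAttemptRequestProto.Builder b =
//       MRServiceProtos.FailTaskAttemptRequestProto.newBuilder();
//   b.getTaskAttemptIdBuilder().setId(0);         // assumes MRProtos.TaskAttemptIdProto#setId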
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.FailTaskAttemptRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.FailTaskAttemptRequestProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FailTaskAttemptRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FailTaskAttemptRequestProto>() {
@java.lang.Override
public FailTaskAttemptRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
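// Editorial note: parsePartialFrom above wraps every failure mode as an
// InvalidProtocolBufferException carrying the partially built message
// (setUnfinishedMessage), so callers can inspect whatever was decoded before
// the error. The @Deprecated field form of PARSER is kept for compatibility;
// the parser() / getParserForType() accessors below return the same singleton.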
public static org.apache.hadoop.thirdparty.protobuf.Parser<FailTaskAttemptRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<FailTaskAttemptRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
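// Editorial sketch: the generated message supports the usual byte-level
// round trip; assuming a populated `request` as in the note above:
//
//   byte[] wire = request.toByteArray();
//   MRServiceProtos.FailTaskAttemptRequestProto copy =
//       MRServiceProtos.FailTaskAttemptRequestProto.parseFrom(wire);
//   assert copy.equals(request);   // field-wise equality, unknown fields included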
public interface FailTaskAttemptResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.mapreduce.FailTaskAttemptResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.mapreduce.FailTaskAttemptResponseProto}
*/
public static final class FailTaskAttemptResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.mapreduce.FailTaskAttemptResponseProto)
FailTaskAttemptResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use FailTaskAttemptResponseProto.newBuilder() to construct.
private FailTaskAttemptResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private FailTaskAttemptResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new FailTaskAttemptResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.mapreduce.FailTaskAttemptResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.mapreduce.FailTaskAttemptResponseProto)
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto build() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto buildPartial() {
org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto) {
return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto other) {
if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
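// Editorial note: with no declared fields, the parse loop above only has to
// distinguish end-of-stream (tag 0) from unrecognised tags; the latter are
// routed through parseUnknownField so data written by newer schema revisions
// survives a parse/serialize round trip.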
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.mapreduce.FailTaskAttemptResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.mapreduce.FailTaskAttemptResponseProto)
private static final org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto();
}
public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FailTaskAttemptResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FailTaskAttemptResponseProto>() {
@java.lang.Override
public FailTaskAttemptResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<FailTaskAttemptResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<FailTaskAttemptResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
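// Editorial note: FailTaskAttemptResponseProto declares no fields, so every
// instance without unknown fields is equal to getDefaultInstance(), and
// equals()/hashCode() above depend only on the descriptor and unknown-field
// set. A hedged round-trip sketch:
//
//   MRServiceProtos.FailTaskAttemptResponseProto resp =
//       MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance();
//   byte[] wire = resp.toByteArray();   // zero-length payload
//   assert MRServiceProtos.FailTaskAttemptResponseProto.parseFrom(wire).equals(resp);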
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable;
public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\027mr_service_protos.proto\022\020hadoop.mapred" +
"uce\032\016Security.proto\032\017mr_protos.proto\032\021ya" +
"rn_protos.proto\"H\n\030GetJobReportRequestPr" +
"oto\022,\n\006job_id\030\001 \001(\0132\034.hadoop.mapreduce.J" +
"obIdProto\"Q\n\031GetJobReportResponseProto\0224" +
"\n\njob_report\030\001 \001(\0132 .hadoop.mapreduce.Jo" +
"bReportProto\"K\n\031GetTaskReportRequestProt" +
"o\022.\n\007task_id\030\001 \001(\0132\035.hadoop.mapreduce.Ta" +
"skIdProto\"T\n\032GetTaskReportResponseProto\022" +
"6\n\013task_report\030\001 \001(\0132!.hadoop.mapreduce." +
"TaskReportProto\"a\n GetTaskAttemptReportR" +
"equestProto\022=\n\017task_attempt_id\030\001 \001(\0132$.h" +
"adoop.mapreduce.TaskAttemptIdProto\"j\n!Ge" +
"tTaskAttemptReportResponseProto\022E\n\023task_" +
"attempt_report\030\001 \001(\0132(.hadoop.mapreduce." +
"TaskAttemptReportProto\"G\n\027GetCountersReq" +
"uestProto\022,\n\006job_id\030\001 \001(\0132\034.hadoop.mapre" +
"duce.JobIdProto\"M\n\030GetCountersResponsePr" +
"oto\0221\n\010counters\030\001 \001(\0132\037.hadoop.mapreduce" +
".CountersProto\"\205\001\n*GetTaskAttemptComplet" +
"ionEventsRequestProto\022,\n\006job_id\030\001 \001(\0132\034." +
"hadoop.mapreduce.JobIdProto\022\025\n\rfrom_even" +
"t_id\030\002 \001(\005\022\022\n\nmax_events\030\003 \001(\005\"{\n+GetTas" +
"kAttemptCompletionEventsResponseProto\022L\n" +
"\021completion_events\030\001 \003(\01321.hadoop.mapred" +
"uce.TaskAttemptCompletionEventProto\"~\n\032G" +
"etTaskReportsRequestProto\022,\n\006job_id\030\001 \001(" +
"\0132\034.hadoop.mapreduce.JobIdProto\0222\n\ttask_" +
"type\030\002 \001(\0162\037.hadoop.mapreduce.TaskTypePr" +
"oto\"V\n\033GetTaskReportsResponseProto\0227\n\014ta" +
"sk_reports\030\001 \003(\0132!.hadoop.mapreduce.Task" +
"ReportProto\"[\n\032GetDiagnosticsRequestProt" +
"o\022=\n\017task_attempt_id\030\001 \001(\0132$.hadoop.mapr" +
"educe.TaskAttemptIdProto\"2\n\033GetDiagnosti" +
"csResponseProto\022\023\n\013diagnostics\030\001 \003(\t\"C\n\023" +
"KillJobRequestProto\022,\n\006job_id\030\001 \001(\0132\034.ha" +
"doop.mapreduce.JobIdProto\"\026\n\024KillJobResp" +
"onseProto\"F\n\024KillTaskRequestProto\022.\n\007tas" +
"k_id\030\001 \001(\0132\035.hadoop.mapreduce.TaskIdProt" +
"o\"\027\n\025KillTaskResponseProto\"\\\n\033KillTaskAt" +
"temptRequestProto\022=\n\017task_attempt_id\030\001 \001" +
"(\0132$.hadoop.mapreduce.TaskAttemptIdProto" +
"\"\036\n\034KillTaskAttemptResponseProto\"\\\n\033Fail" +
"TaskAttemptRequestProto\022=\n\017task_attempt_" +
"id\030\001 \001(\0132$.hadoop.mapreduce.TaskAttemptI" +
"dProto\"\036\n\034FailTaskAttemptResponseProtoB=" +
"\n$org.apache.hadoop.mapreduce.v2.protoB\017" +
"MRServiceProtos\210\001\001\240\001\001"
};
descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(),
org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor(),
org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
});
internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor,
new java.lang.String[] { "JobId", });
internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor,
new java.lang.String[] { "JobReport", });
internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor,
new java.lang.String[] { "TaskId", });
internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor,
new java.lang.String[] { "TaskReport", });
internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor,
new java.lang.String[] { "TaskAttemptId", });
internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor,
new java.lang.String[] { "TaskAttemptReport", });
internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor,
new java.lang.String[] { "JobId", });
internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor,
new java.lang.String[] { "Counters", });
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor,
new java.lang.String[] { "JobId", "FromEventId", "MaxEvents", });
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor,
new java.lang.String[] { "CompletionEvents", });
internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor,
new java.lang.String[] { "JobId", "TaskType", });
internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor,
new java.lang.String[] { "TaskReports", });
internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor,
new java.lang.String[] { "TaskAttemptId", });
internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor,
new java.lang.String[] { "Diagnostics", });
internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor,
new java.lang.String[] { "JobId", });
internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor,
new java.lang.String[] { "TaskId", });
internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor,
new java.lang.String[] { "TaskAttemptId", });
internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor,
new java.lang.String[] { "TaskAttemptId", });
internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor,
new java.lang.String[] { });
org.apache.hadoop.security.proto.SecurityProtos.getDescriptor();
org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor();
org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
}
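// Editorial note: the static block above rebuilds the FileDescriptor from
// the serialized mr_service_protos.proto and binds one descriptor plus
// FieldAccessorTable per message, in declaration order (index 0 =
// GetJobReportRequestProto ... index 21 = FailTaskAttemptResponseProto);
// the trailing getDescriptor() calls force initialization of the dependency
// files (Security.proto, mr_protos.proto, yarn_protos.proto). A reflective
// lookup sketch using the public protobuf Descriptors API:
//
//   org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor d =
//       MRServiceProtos.getDescriptor().findMessageTypeByName("KillJobRequestProto");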
// @@protoc_insertion_point(outer_class_scope)
}