
// org.apache.hadoop.mapreduce.jobhistory.TaskFailed — Maven / Gradle / Ivy
/**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory;
import org.apache.hadoop.shaded.org.apache.avro.generic.GenericArray;
import org.apache.hadoop.shaded.org.apache.avro.specific.SpecificData;
import org.apache.hadoop.shaded.org.apache.avro.util.Utf8;
import org.apache.hadoop.shaded.org.apache.avro.message.BinaryMessageEncoder;
import org.apache.hadoop.shaded.org.apache.avro.message.BinaryMessageDecoder;
import org.apache.hadoop.shaded.org.apache.avro.message.SchemaStore;
@org.apache.hadoop.shaded.org.apache.avro.specific.AvroGenerated
public class TaskFailed extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 8508355931636422515L;
public static final org.apache.hadoop.shaded.org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TaskFailed\",\"namespace\":\"org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory\",\"fields\":[{\"name\":\"taskid\",\"type\":\"string\"},{\"name\":\"taskType\",\"type\":\"string\"},{\"name\":\"finishTime\",\"type\":\"long\"},{\"name\":\"error\",\"type\":\"string\"},{\"name\":\"failedDueToAttempt\",\"type\":[\"null\",\"string\"]},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"counters\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"JhCounters\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"groups\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounterGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"counts\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounter\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"long\"}]}}}]}}}]}],\"default\":null}]}");
public static org.apache.hadoop.shaded.org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder ENCODER =
new BinaryMessageEncoder(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder DECODER =
new BinaryMessageDecoder(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageEncoder instance used by this class.
* @return the message encoder used by this class
*/
public static BinaryMessageEncoder getEncoder() {
return ENCODER;
}
/**
* Return the BinaryMessageDecoder instance used by this class.
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
* @param resolver a {@link SchemaStore} used to find schemas by fingerprint
* @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
*/
public static BinaryMessageDecoder createDecoder(SchemaStore resolver) {
return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver);
}
/**
* Serializes this TaskFailed to a ByteBuffer.
* @return a buffer holding the serialized data for this instance
* @throws java.io.IOException if this instance could not be serialized
*/
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/**
* Deserializes a TaskFailed from a ByteBuffer.
* @param b a byte buffer holding serialized data for an instance of this class
* @return a TaskFailed instance decoded from the given buffer
* @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
*/
public static TaskFailed fromByteBuffer(
java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
@Deprecated public java.lang.CharSequence taskid;
@Deprecated public java.lang.CharSequence taskType;
@Deprecated public long finishTime;
@Deprecated public java.lang.CharSequence error;
@Deprecated public java.lang.CharSequence failedDueToAttempt;
@Deprecated public java.lang.CharSequence status;
@Deprecated public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
/**
* Default constructor. Note that this does not initialize fields
* to their default values from the schema. If that is desired then
* one should use newBuilder()
.
*/
public TaskFailed() {}
/**
* All-args constructor.
* @param taskid The new value for taskid
* @param taskType The new value for taskType
* @param finishTime The new value for finishTime
* @param error The new value for error
* @param failedDueToAttempt The new value for failedDueToAttempt
* @param status The new value for status
* @param counters The new value for counters
*/
public TaskFailed(java.lang.CharSequence taskid, java.lang.CharSequence taskType, java.lang.Long finishTime, java.lang.CharSequence error, java.lang.CharSequence failedDueToAttempt, java.lang.CharSequence status, org.apache.hadoop.mapreduce.jobhistory.JhCounters counters) {
this.taskid = taskid;
this.taskType = taskType;
this.finishTime = finishTime;
this.error = error;
this.failedDueToAttempt = failedDueToAttempt;
this.status = status;
this.counters = counters;
}
public org.apache.hadoop.shaded.org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
public org.apache.hadoop.shaded.org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return taskid;
case 1: return taskType;
case 2: return finishTime;
case 3: return error;
case 4: return failedDueToAttempt;
case 5: return status;
case 6: return counters;
default: throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: taskid = (java.lang.CharSequence)value$; break;
case 1: taskType = (java.lang.CharSequence)value$; break;
case 2: finishTime = (java.lang.Long)value$; break;
case 3: error = (java.lang.CharSequence)value$; break;
case 4: failedDueToAttempt = (java.lang.CharSequence)value$; break;
case 5: status = (java.lang.CharSequence)value$; break;
case 6: counters = (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters)value$; break;
default: throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'taskid' field.
* @return The value of the 'taskid' field.
*/
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
* Sets the value of the 'taskid' field.
* @param value the value to set.
*/
public void setTaskid(java.lang.CharSequence value) {
this.taskid = value;
}
/**
* Gets the value of the 'taskType' field.
* @return The value of the 'taskType' field.
*/
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
* Sets the value of the 'taskType' field.
* @param value the value to set.
*/
public void setTaskType(java.lang.CharSequence value) {
this.taskType = value;
}
/**
* Gets the value of the 'finishTime' field.
* @return The value of the 'finishTime' field.
*/
public long getFinishTime() {
return finishTime;
}
/**
* Sets the value of the 'finishTime' field.
* @param value the value to set.
*/
public void setFinishTime(long value) {
this.finishTime = value;
}
/**
* Gets the value of the 'error' field.
* @return The value of the 'error' field.
*/
public java.lang.CharSequence getError() {
return error;
}
/**
* Sets the value of the 'error' field.
* @param value the value to set.
*/
public void setError(java.lang.CharSequence value) {
this.error = value;
}
/**
* Gets the value of the 'failedDueToAttempt' field.
* @return The value of the 'failedDueToAttempt' field.
*/
public java.lang.CharSequence getFailedDueToAttempt() {
return failedDueToAttempt;
}
/**
* Sets the value of the 'failedDueToAttempt' field.
* @param value the value to set.
*/
public void setFailedDueToAttempt(java.lang.CharSequence value) {
this.failedDueToAttempt = value;
}
/**
* Gets the value of the 'status' field.
* @return The value of the 'status' field.
*/
public java.lang.CharSequence getStatus() {
return status;
}
/**
* Sets the value of the 'status' field.
* @param value the value to set.
*/
public void setStatus(java.lang.CharSequence value) {
this.status = value;
}
/**
* Gets the value of the 'counters' field.
* @return The value of the 'counters' field.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/**
* Sets the value of the 'counters' field.
* @param value the value to set.
*/
public void setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
this.counters = value;
}
/**
* Creates a new TaskFailed RecordBuilder.
* @return A new TaskFailed RecordBuilder
*/
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder newBuilder() {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder();
}
/**
* Creates a new TaskFailed RecordBuilder by copying an existing Builder.
* @param other The existing builder to copy.
* @return A new TaskFailed RecordBuilder
*/
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder other) {
if (other == null) {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder();
} else {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder(other);
}
}
/**
* Creates a new TaskFailed RecordBuilder by copying an existing TaskFailed instance.
* @param other The existing instance to copy.
* @return A new TaskFailed RecordBuilder
*/
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed other) {
if (other == null) {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder();
} else {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder(other);
}
}
/**
* RecordBuilder for TaskFailed instances.
*/
@org.apache.hadoop.shaded.org.apache.avro.specific.AvroGenerated
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
implements org.apache.avro.data.RecordBuilder {
private java.lang.CharSequence taskid;
private java.lang.CharSequence taskType;
private long finishTime;
private java.lang.CharSequence error;
private java.lang.CharSequence failedDueToAttempt;
private java.lang.CharSequence status;
private org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
private org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder countersBuilder;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$);
}
/**
* Creates a Builder by copying an existing Builder.
* @param other The existing Builder to copy.
*/
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder other) {
super(other);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[2].schema(), other.finishTime);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.error)) {
this.error = data().deepCopy(fields()[3].schema(), other.error);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
if (isValidValue(fields()[4], other.failedDueToAttempt)) {
this.failedDueToAttempt = data().deepCopy(fields()[4].schema(), other.failedDueToAttempt);
fieldSetFlags()[4] = other.fieldSetFlags()[4];
}
if (isValidValue(fields()[5], other.status)) {
this.status = data().deepCopy(fields()[5].schema(), other.status);
fieldSetFlags()[5] = other.fieldSetFlags()[5];
}
if (isValidValue(fields()[6], other.counters)) {
this.counters = data().deepCopy(fields()[6].schema(), other.counters);
fieldSetFlags()[6] = other.fieldSetFlags()[6];
}
if (other.hasCountersBuilder()) {
this.countersBuilder = org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder(other.getCountersBuilder());
}
}
/**
* Creates a Builder by copying an existing TaskFailed instance
* @param other The existing instance to copy.
*/
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed other) {
super(SCHEMA$);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[2].schema(), other.finishTime);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.error)) {
this.error = data().deepCopy(fields()[3].schema(), other.error);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.failedDueToAttempt)) {
this.failedDueToAttempt = data().deepCopy(fields()[4].schema(), other.failedDueToAttempt);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.status)) {
this.status = data().deepCopy(fields()[5].schema(), other.status);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.counters)) {
this.counters = data().deepCopy(fields()[6].schema(), other.counters);
fieldSetFlags()[6] = true;
}
this.countersBuilder = null;
}
/**
* Gets the value of the 'taskid' field.
* @return The value.
*/
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
* Sets the value of the 'taskid' field.
* @param value The value of 'taskid'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setTaskid(java.lang.CharSequence value) {
validate(fields()[0], value);
this.taskid = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'taskid' field has been set.
* @return True if the 'taskid' field has been set, false otherwise.
*/
public boolean hasTaskid() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'taskid' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder clearTaskid() {
taskid = null;
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'taskType' field.
* @return The value.
*/
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
* Sets the value of the 'taskType' field.
* @param value The value of 'taskType'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setTaskType(java.lang.CharSequence value) {
validate(fields()[1], value);
this.taskType = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'taskType' field has been set.
* @return True if the 'taskType' field has been set, false otherwise.
*/
public boolean hasTaskType() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'taskType' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder clearTaskType() {
taskType = null;
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'finishTime' field.
* @return The value.
*/
public long getFinishTime() {
return finishTime;
}
/**
* Sets the value of the 'finishTime' field.
* @param value The value of 'finishTime'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setFinishTime(long value) {
validate(fields()[2], value);
this.finishTime = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'finishTime' field has been set.
* @return True if the 'finishTime' field has been set, false otherwise.
*/
public boolean hasFinishTime() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'finishTime' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder clearFinishTime() {
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'error' field.
* @return The value.
*/
public java.lang.CharSequence getError() {
return error;
}
/**
* Sets the value of the 'error' field.
* @param value The value of 'error'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setError(java.lang.CharSequence value) {
validate(fields()[3], value);
this.error = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'error' field has been set.
* @return True if the 'error' field has been set, false otherwise.
*/
public boolean hasError() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'error' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder clearError() {
error = null;
fieldSetFlags()[3] = false;
return this;
}
/**
* Gets the value of the 'failedDueToAttempt' field.
* @return The value.
*/
public java.lang.CharSequence getFailedDueToAttempt() {
return failedDueToAttempt;
}
/**
* Sets the value of the 'failedDueToAttempt' field.
* @param value The value of 'failedDueToAttempt'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setFailedDueToAttempt(java.lang.CharSequence value) {
validate(fields()[4], value);
this.failedDueToAttempt = value;
fieldSetFlags()[4] = true;
return this;
}
/**
* Checks whether the 'failedDueToAttempt' field has been set.
* @return True if the 'failedDueToAttempt' field has been set, false otherwise.
*/
public boolean hasFailedDueToAttempt() {
return fieldSetFlags()[4];
}
/**
* Clears the value of the 'failedDueToAttempt' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder clearFailedDueToAttempt() {
failedDueToAttempt = null;
fieldSetFlags()[4] = false;
return this;
}
/**
* Gets the value of the 'status' field.
* @return The value.
*/
public java.lang.CharSequence getStatus() {
return status;
}
/**
* Sets the value of the 'status' field.
* @param value The value of 'status'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setStatus(java.lang.CharSequence value) {
validate(fields()[5], value);
this.status = value;
fieldSetFlags()[5] = true;
return this;
}
/**
* Checks whether the 'status' field has been set.
* @return True if the 'status' field has been set, false otherwise.
*/
public boolean hasStatus() {
return fieldSetFlags()[5];
}
/**
* Clears the value of the 'status' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder clearStatus() {
status = null;
fieldSetFlags()[5] = false;
return this;
}
/**
* Gets the value of the 'counters' field.
* @return The value.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/**
* Sets the value of the 'counters' field.
* @param value The value of 'counters'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
validate(fields()[6], value);
this.countersBuilder = null;
this.counters = value;
fieldSetFlags()[6] = true;
return this;
}
/**
* Checks whether the 'counters' field has been set.
* @return True if the 'counters' field has been set, false otherwise.
*/
public boolean hasCounters() {
return fieldSetFlags()[6];
}
/**
* Gets the Builder instance for the 'counters' field and creates one if it doesn't exist yet.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder getCountersBuilder() {
if (countersBuilder == null) {
if (hasCounters()) {
setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder(counters));
} else {
setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder());
}
}
return countersBuilder;
}
/**
* Sets the Builder instance for the 'counters' field
* @param value The builder instance that must be set.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder value) {
clearCounters();
countersBuilder = value;
return this;
}
/**
* Checks whether the 'counters' field has an active Builder instance
* @return True if the 'counters' field has an active Builder instance
*/
public boolean hasCountersBuilder() {
return countersBuilder != null;
}
/**
* Clears the value of the 'counters' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder clearCounters() {
counters = null;
countersBuilder = null;
fieldSetFlags()[6] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public TaskFailed build() {
try {
TaskFailed record = new TaskFailed();
record.taskid = fieldSetFlags()[0] ? this.taskid : (java.lang.CharSequence) defaultValue(fields()[0]);
record.taskType = fieldSetFlags()[1] ? this.taskType : (java.lang.CharSequence) defaultValue(fields()[1]);
record.finishTime = fieldSetFlags()[2] ? this.finishTime : (java.lang.Long) defaultValue(fields()[2]);
record.error = fieldSetFlags()[3] ? this.error : (java.lang.CharSequence) defaultValue(fields()[3]);
record.failedDueToAttempt = fieldSetFlags()[4] ? this.failedDueToAttempt : (java.lang.CharSequence) defaultValue(fields()[4]);
record.status = fieldSetFlags()[5] ? this.status : (java.lang.CharSequence) defaultValue(fields()[5]);
if (countersBuilder != null) {
try {
record.counters = this.countersBuilder.build();
} catch (org.apache.hadoop.shaded.org.apache.avro.AvroMissingFieldException e) {
e.addParentField(record.getSchema().getField("counters"));
throw e;
}
} else {
record.counters = fieldSetFlags()[6] ? this.counters : (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters) defaultValue(fields()[6]);
}
return record;
} catch (org.apache.hadoop.shaded.org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.hadoop.shaded.org.apache.avro.io.DatumWriter
WRITER$ = (org.apache.hadoop.shaded.org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$);
@Override public void writeExternal(java.io.ObjectOutput out)
throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.hadoop.shaded.org.apache.avro.io.DatumReader
READER$ = (org.apache.hadoop.shaded.org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$);
@Override public void readExternal(java.io.ObjectInput in)
throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
@Override protected boolean hasCustomCoders() { return true; }
@Override public void customEncode(org.apache.hadoop.shaded.org.apache.avro.io.Encoder out)
throws java.io.IOException
{
out.writeString(this.taskid);
out.writeString(this.taskType);
out.writeLong(this.finishTime);
out.writeString(this.error);
if (this.failedDueToAttempt == null) {
out.writeIndex(0);
out.writeNull();
} else {
out.writeIndex(1);
out.writeString(this.failedDueToAttempt);
}
out.writeString(this.status);
if (this.counters == null) {
out.writeIndex(0);
out.writeNull();
} else {
out.writeIndex(1);
this.counters.customEncode(out);
}
}
@Override public void customDecode(org.apache.hadoop.shaded.org.apache.avro.io.ResolvingDecoder in)
throws java.io.IOException
{
org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
if (fieldOrder == null) {
this.taskid = in.readString(this.taskid instanceof Utf8 ? (Utf8)this.taskid : null);
this.taskType = in.readString(this.taskType instanceof Utf8 ? (Utf8)this.taskType : null);
this.finishTime = in.readLong();
this.error = in.readString(this.error instanceof Utf8 ? (Utf8)this.error : null);
if (in.readIndex() != 1) {
in.readNull();
this.failedDueToAttempt = null;
} else {
this.failedDueToAttempt = in.readString(this.failedDueToAttempt instanceof Utf8 ? (Utf8)this.failedDueToAttempt : null);
}
this.status = in.readString(this.status instanceof Utf8 ? (Utf8)this.status : null);
if (in.readIndex() != 1) {
in.readNull();
this.counters = null;
} else {
if (this.counters == null) {
this.counters = new org.apache.hadoop.mapreduce.jobhistory.JhCounters();
}
this.counters.customDecode(in);
}
} else {
for (int i = 0; i < 7; i++) {
switch (fieldOrder[i].pos()) {
case 0:
this.taskid = in.readString(this.taskid instanceof Utf8 ? (Utf8)this.taskid : null);
break;
case 1:
this.taskType = in.readString(this.taskType instanceof Utf8 ? (Utf8)this.taskType : null);
break;
case 2:
this.finishTime = in.readLong();
break;
case 3:
this.error = in.readString(this.error instanceof Utf8 ? (Utf8)this.error : null);
break;
case 4:
if (in.readIndex() != 1) {
in.readNull();
this.failedDueToAttempt = null;
} else {
this.failedDueToAttempt = in.readString(this.failedDueToAttempt instanceof Utf8 ? (Utf8)this.failedDueToAttempt : null);
}
break;
case 5:
this.status = in.readString(this.status instanceof Utf8 ? (Utf8)this.status : null);
break;
case 6:
if (in.readIndex() != 1) {
in.readNull();
this.counters = null;
} else {
if (this.counters == null) {
this.counters = new org.apache.hadoop.mapreduce.jobhistory.JhCounters();
}
this.counters.customDecode(in);
}
break;
default:
throw new java.io.IOException("Corrupt ResolvingDecoder.");
}
}
}
}
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy