
/**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory;
import org.apache.hadoop.shaded.org.apache.avro.generic.GenericArray;
import org.apache.hadoop.shaded.org.apache.avro.specific.SpecificData;
import org.apache.hadoop.shaded.org.apache.avro.util.Utf8;
import org.apache.hadoop.shaded.org.apache.avro.message.BinaryMessageEncoder;
import org.apache.hadoop.shaded.org.apache.avro.message.BinaryMessageDecoder;
import org.apache.hadoop.shaded.org.apache.avro.message.SchemaStore;
@org.apache.hadoop.shaded.org.apache.avro.specific.AvroGenerated
public class TaskAttemptUnsuccessfulCompletion extends org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecordBase implements org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = -945198760532711624L;
public static final org.apache.hadoop.shaded.org.apache.avro.Schema SCHEMA$ = new org.apache.hadoop.shaded.org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TaskAttemptUnsuccessfulCompletion\",\"namespace\":\"org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory\",\"fields\":[{\"name\":\"taskid\",\"type\":\"string\"},{\"name\":\"taskType\",\"type\":\"string\"},{\"name\":\"attemptId\",\"type\":\"string\"},{\"name\":\"finishTime\",\"type\":\"long\"},{\"name\":\"hostname\",\"type\":\"string\"},{\"name\":\"port\",\"type\":\"int\"},{\"name\":\"rackname\",\"type\":\"string\"},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"error\",\"type\":\"string\"},{\"name\":\"counters\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"JhCounters\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"groups\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounterGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"counts\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounter\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"long\"}]}}}]}}}]}],\"default\":null},{\"name\":\"clockSplits\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"cpuUsages\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"vMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"physMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}}]}");
public static org.apache.hadoop.shaded.org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<TaskAttemptUnsuccessfulCompletion> ENCODER =
new BinaryMessageEncoder<TaskAttemptUnsuccessfulCompletion>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<TaskAttemptUnsuccessfulCompletion> DECODER =
new BinaryMessageDecoder<TaskAttemptUnsuccessfulCompletion>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageEncoder instance used by this class.
* @return the message encoder used by this class
*/
public static BinaryMessageEncoder<TaskAttemptUnsuccessfulCompletion> getEncoder() {
return ENCODER;
}
/**
* Return the BinaryMessageDecoder instance used by this class.
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<TaskAttemptUnsuccessfulCompletion> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
* @param resolver a {@link SchemaStore} used to find schemas by fingerprint
* @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
*/
public static BinaryMessageDecoder<TaskAttemptUnsuccessfulCompletion> createDecoder(SchemaStore resolver) {
return new BinaryMessageDecoder<TaskAttemptUnsuccessfulCompletion>(MODEL$, SCHEMA$, resolver);
}
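// Usage sketch (illustrative, not emitted by the Avro compiler): a
// SchemaStore.Cache can hold older writer schemas so the decoder can
// resolve payloads whose fingerprint differs from SCHEMA$. The method
// name and the registration step below are assumptions about caller setup.
private static TaskAttemptUnsuccessfulCompletion decodeWithStoreExample(java.nio.ByteBuffer b, org.apache.hadoop.shaded.org.apache.avro.Schema olderWriterSchema) throws java.io.IOException {
SchemaStore.Cache store = new SchemaStore.Cache();
store.addSchema(olderWriterSchema); // register each historical writer schema by fingerprint
return createDecoder(store).decode(b);
}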
/**
* Serializes this TaskAttemptUnsuccessfulCompletion to a ByteBuffer.
* @return a buffer holding the serialized data for this instance
* @throws java.io.IOException if this instance could not be serialized
*/
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/**
* Deserializes a TaskAttemptUnsuccessfulCompletion from a ByteBuffer.
* @param b a byte buffer holding serialized data for an instance of this class
* @return a TaskAttemptUnsuccessfulCompletion instance decoded from the given buffer
* @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
*/
public static TaskAttemptUnsuccessfulCompletion fromByteBuffer(
java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
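// Usage sketch (illustrative, not part of the generated source): the
// static encoder/decoder pair round-trips a record through Avro's
// single-object binary encoding.
private static TaskAttemptUnsuccessfulCompletion roundTripExample(TaskAttemptUnsuccessfulCompletion event) throws java.io.IOException {
java.nio.ByteBuffer serialized = event.toByteBuffer(); // header, schema fingerprint, then the record body
return TaskAttemptUnsuccessfulCompletion.fromByteBuffer(serialized); // decoded against SCHEMA$
}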
@Deprecated public java.lang.CharSequence taskid;
@Deprecated public java.lang.CharSequence taskType;
@Deprecated public java.lang.CharSequence attemptId;
@Deprecated public long finishTime;
@Deprecated public java.lang.CharSequence hostname;
@Deprecated public int port;
@Deprecated public java.lang.CharSequence rackname;
@Deprecated public java.lang.CharSequence status;
@Deprecated public java.lang.CharSequence error;
@Deprecated public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
@Deprecated public java.util.List<java.lang.Integer> clockSplits;
@Deprecated public java.util.List<java.lang.Integer> cpuUsages;
@Deprecated public java.util.List<java.lang.Integer> vMemKbytes;
@Deprecated public java.util.List<java.lang.Integer> physMemKbytes;
/**
* Default constructor. Note that this does not initialize fields
* to their default values from the schema. If that is desired then
* one should use <code>newBuilder()</code>.
*/
public TaskAttemptUnsuccessfulCompletion() {}
/**
* All-args constructor.
* @param taskid The new value for taskid
* @param taskType The new value for taskType
* @param attemptId The new value for attemptId
* @param finishTime The new value for finishTime
* @param hostname The new value for hostname
* @param port The new value for port
* @param rackname The new value for rackname
* @param status The new value for status
* @param error The new value for error
* @param counters The new value for counters
* @param clockSplits The new value for clockSplits
* @param cpuUsages The new value for cpuUsages
* @param vMemKbytes The new value for vMemKbytes
* @param physMemKbytes The new value for physMemKbytes
*/
public TaskAttemptUnsuccessfulCompletion(java.lang.CharSequence taskid, java.lang.CharSequence taskType, java.lang.CharSequence attemptId, java.lang.Long finishTime, java.lang.CharSequence hostname, java.lang.Integer port, java.lang.CharSequence rackname, java.lang.CharSequence status, java.lang.CharSequence error, org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters, java.util.List<java.lang.Integer> clockSplits, java.util.List<java.lang.Integer> cpuUsages, java.util.List<java.lang.Integer> vMemKbytes, java.util.List<java.lang.Integer> physMemKbytes) {
this.taskid = taskid;
this.taskType = taskType;
this.attemptId = attemptId;
this.finishTime = finishTime;
this.hostname = hostname;
this.port = port;
this.rackname = rackname;
this.status = status;
this.error = error;
this.counters = counters;
this.clockSplits = clockSplits;
this.cpuUsages = cpuUsages;
this.vMemKbytes = vMemKbytes;
this.physMemKbytes = physMemKbytes;
}
public org.apache.hadoop.shaded.org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
public org.apache.hadoop.shaded.org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return taskid;
case 1: return taskType;
case 2: return attemptId;
case 3: return finishTime;
case 4: return hostname;
case 5: return port;
case 6: return rackname;
case 7: return status;
case 8: return error;
case 9: return counters;
case 10: return clockSplits;
case 11: return cpuUsages;
case 12: return vMemKbytes;
case 13: return physMemKbytes;
default: throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: taskid = (java.lang.CharSequence)value$; break;
case 1: taskType = (java.lang.CharSequence)value$; break;
case 2: attemptId = (java.lang.CharSequence)value$; break;
case 3: finishTime = (java.lang.Long)value$; break;
case 4: hostname = (java.lang.CharSequence)value$; break;
case 5: port = (java.lang.Integer)value$; break;
case 6: rackname = (java.lang.CharSequence)value$; break;
case 7: status = (java.lang.CharSequence)value$; break;
case 8: error = (java.lang.CharSequence)value$; break;
case 9: counters = (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters)value$; break;
case 10: clockSplits = (java.util.List<java.lang.Integer>)value$; break;
case 11: cpuUsages = (java.util.List<java.lang.Integer>)value$; break;
case 12: vMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
case 13: physMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
default: throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'taskid' field.
* @return The value of the 'taskid' field.
*/
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
* Sets the value of the 'taskid' field.
* @param value the value to set.
*/
public void setTaskid(java.lang.CharSequence value) {
this.taskid = value;
}
/**
* Gets the value of the 'taskType' field.
* @return The value of the 'taskType' field.
*/
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
* Sets the value of the 'taskType' field.
* @param value the value to set.
*/
public void setTaskType(java.lang.CharSequence value) {
this.taskType = value;
}
/**
* Gets the value of the 'attemptId' field.
* @return The value of the 'attemptId' field.
*/
public java.lang.CharSequence getAttemptId() {
return attemptId;
}
/**
* Sets the value of the 'attemptId' field.
* @param value the value to set.
*/
public void setAttemptId(java.lang.CharSequence value) {
this.attemptId = value;
}
/**
* Gets the value of the 'finishTime' field.
* @return The value of the 'finishTime' field.
*/
public long getFinishTime() {
return finishTime;
}
/**
* Sets the value of the 'finishTime' field.
* @param value the value to set.
*/
public void setFinishTime(long value) {
this.finishTime = value;
}
/**
* Gets the value of the 'hostname' field.
* @return The value of the 'hostname' field.
*/
public java.lang.CharSequence getHostname() {
return hostname;
}
/**
* Sets the value of the 'hostname' field.
* @param value the value to set.
*/
public void setHostname(java.lang.CharSequence value) {
this.hostname = value;
}
/**
* Gets the value of the 'port' field.
* @return The value of the 'port' field.
*/
public int getPort() {
return port;
}
/**
* Sets the value of the 'port' field.
* @param value the value to set.
*/
public void setPort(int value) {
this.port = value;
}
/**
* Gets the value of the 'rackname' field.
* @return The value of the 'rackname' field.
*/
public java.lang.CharSequence getRackname() {
return rackname;
}
/**
* Sets the value of the 'rackname' field.
* @param value the value to set.
*/
public void setRackname(java.lang.CharSequence value) {
this.rackname = value;
}
/**
* Gets the value of the 'status' field.
* @return The value of the 'status' field.
*/
public java.lang.CharSequence getStatus() {
return status;
}
/**
* Sets the value of the 'status' field.
* @param value the value to set.
*/
public void setStatus(java.lang.CharSequence value) {
this.status = value;
}
/**
* Gets the value of the 'error' field.
* @return The value of the 'error' field.
*/
public java.lang.CharSequence getError() {
return error;
}
/**
* Sets the value of the 'error' field.
* @param value the value to set.
*/
public void setError(java.lang.CharSequence value) {
this.error = value;
}
/**
* Gets the value of the 'counters' field.
* @return The value of the 'counters' field.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/**
* Sets the value of the 'counters' field.
* @param value the value to set.
*/
public void setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
this.counters = value;
}
/**
* Gets the value of the 'clockSplits' field.
* @return The value of the 'clockSplits' field.
*/
public java.util.List<java.lang.Integer> getClockSplits() {
return clockSplits;
}
/**
* Sets the value of the 'clockSplits' field.
* @param value the value to set.
*/
public void setClockSplits(java.util.List<java.lang.Integer> value) {
this.clockSplits = value;
}
/**
* Gets the value of the 'cpuUsages' field.
* @return The value of the 'cpuUsages' field.
*/
public java.util.List<java.lang.Integer> getCpuUsages() {
return cpuUsages;
}
/**
* Sets the value of the 'cpuUsages' field.
* @param value the value to set.
*/
public void setCpuUsages(java.util.List<java.lang.Integer> value) {
this.cpuUsages = value;
}
/**
* Gets the value of the 'vMemKbytes' field.
* @return The value of the 'vMemKbytes' field.
*/
public java.util.List<java.lang.Integer> getVMemKbytes() {
return vMemKbytes;
}
/**
* Sets the value of the 'vMemKbytes' field.
* @param value the value to set.
*/
public void setVMemKbytes(java.util.List<java.lang.Integer> value) {
this.vMemKbytes = value;
}
/**
* Gets the value of the 'physMemKbytes' field.
* @return The value of the 'physMemKbytes' field.
*/
public java.util.List<java.lang.Integer> getPhysMemKbytes() {
return physMemKbytes;
}
/**
* Sets the value of the 'physMemKbytes' field.
* @param value the value to set.
*/
public void setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
this.physMemKbytes = value;
}
/**
* Creates a new TaskAttemptUnsuccessfulCompletion RecordBuilder.
* @return A new TaskAttemptUnsuccessfulCompletion RecordBuilder
*/
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder newBuilder() {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder();
}
/**
* Creates a new TaskAttemptUnsuccessfulCompletion RecordBuilder by copying an existing Builder.
* @param other The existing builder to copy.
* @return A new TaskAttemptUnsuccessfulCompletion RecordBuilder
*/
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder other) {
if (other == null) {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder();
} else {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder(other);
}
}
/**
* Creates a new TaskAttemptUnsuccessfulCompletion RecordBuilder by copying an existing TaskAttemptUnsuccessfulCompletion instance.
* @param other The existing instance to copy.
* @return A new TaskAttemptUnsuccessfulCompletion RecordBuilder
*/
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion other) {
if (other == null) {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder();
} else {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder(other);
}
}
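// Usage sketch (illustrative, not generated code): building an event with
// the fluent Builder. All values below are made-up examples; the nullable
// 'counters' field is left at its schema default of null.
private static TaskAttemptUnsuccessfulCompletion newBuilderExample() {
return newBuilder()
.setTaskid("task_1408297600000_0001_m_000001")
.setTaskType("MAP")
.setAttemptId("attempt_1408297600000_0001_m_000001_0")
.setFinishTime(System.currentTimeMillis())
.setHostname("worker-01.example.com")
.setPort(50060)
.setRackname("/default-rack")
.setStatus("FAILED")
.setError("Container killed by the ApplicationMaster (example)")
.setClockSplits(java.util.Arrays.asList(0, 0, 0))
.setCpuUsages(java.util.Arrays.asList(0, 0, 0))
.setVMemKbytes(java.util.Arrays.asList(0, 0, 0))
.setPhysMemKbytes(java.util.Arrays.asList(0, 0, 0))
.build();
}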
/**
* RecordBuilder for TaskAttemptUnsuccessfulCompletion instances.
*/
@org.apache.hadoop.shaded.org.apache.avro.specific.AvroGenerated
public static class Builder extends org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecordBuilderBase<TaskAttemptUnsuccessfulCompletion>
implements org.apache.hadoop.shaded.org.apache.avro.data.RecordBuilder<TaskAttemptUnsuccessfulCompletion> {
private java.lang.CharSequence taskid;
private java.lang.CharSequence taskType;
private java.lang.CharSequence attemptId;
private long finishTime;
private java.lang.CharSequence hostname;
private int port;
private java.lang.CharSequence rackname;
private java.lang.CharSequence status;
private java.lang.CharSequence error;
private org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
private org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder countersBuilder;
private java.util.List<java.lang.Integer> clockSplits;
private java.util.List<java.lang.Integer> cpuUsages;
private java.util.List<java.lang.Integer> vMemKbytes;
private java.util.List<java.lang.Integer> physMemKbytes;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$);
}
/**
* Creates a Builder by copying an existing Builder.
* @param other The existing Builder to copy.
*/
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder other) {
super(other);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.attemptId)) {
this.attemptId = data().deepCopy(fields()[2].schema(), other.attemptId);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[3].schema(), other.finishTime);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
if (isValidValue(fields()[4], other.hostname)) {
this.hostname = data().deepCopy(fields()[4].schema(), other.hostname);
fieldSetFlags()[4] = other.fieldSetFlags()[4];
}
if (isValidValue(fields()[5], other.port)) {
this.port = data().deepCopy(fields()[5].schema(), other.port);
fieldSetFlags()[5] = other.fieldSetFlags()[5];
}
if (isValidValue(fields()[6], other.rackname)) {
this.rackname = data().deepCopy(fields()[6].schema(), other.rackname);
fieldSetFlags()[6] = other.fieldSetFlags()[6];
}
if (isValidValue(fields()[7], other.status)) {
this.status = data().deepCopy(fields()[7].schema(), other.status);
fieldSetFlags()[7] = other.fieldSetFlags()[7];
}
if (isValidValue(fields()[8], other.error)) {
this.error = data().deepCopy(fields()[8].schema(), other.error);
fieldSetFlags()[8] = other.fieldSetFlags()[8];
}
if (isValidValue(fields()[9], other.counters)) {
this.counters = data().deepCopy(fields()[9].schema(), other.counters);
fieldSetFlags()[9] = other.fieldSetFlags()[9];
}
if (other.hasCountersBuilder()) {
this.countersBuilder = org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder(other.getCountersBuilder());
}
if (isValidValue(fields()[10], other.clockSplits)) {
this.clockSplits = data().deepCopy(fields()[10].schema(), other.clockSplits);
fieldSetFlags()[10] = other.fieldSetFlags()[10];
}
if (isValidValue(fields()[11], other.cpuUsages)) {
this.cpuUsages = data().deepCopy(fields()[11].schema(), other.cpuUsages);
fieldSetFlags()[11] = other.fieldSetFlags()[11];
}
if (isValidValue(fields()[12], other.vMemKbytes)) {
this.vMemKbytes = data().deepCopy(fields()[12].schema(), other.vMemKbytes);
fieldSetFlags()[12] = other.fieldSetFlags()[12];
}
if (isValidValue(fields()[13], other.physMemKbytes)) {
this.physMemKbytes = data().deepCopy(fields()[13].schema(), other.physMemKbytes);
fieldSetFlags()[13] = other.fieldSetFlags()[13];
}
}
/**
* Creates a Builder by copying an existing TaskAttemptUnsuccessfulCompletion instance
* @param other The existing instance to copy.
*/
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion other) {
super(SCHEMA$);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.attemptId)) {
this.attemptId = data().deepCopy(fields()[2].schema(), other.attemptId);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[3].schema(), other.finishTime);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.hostname)) {
this.hostname = data().deepCopy(fields()[4].schema(), other.hostname);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.port)) {
this.port = data().deepCopy(fields()[5].schema(), other.port);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.rackname)) {
this.rackname = data().deepCopy(fields()[6].schema(), other.rackname);
fieldSetFlags()[6] = true;
}
if (isValidValue(fields()[7], other.status)) {
this.status = data().deepCopy(fields()[7].schema(), other.status);
fieldSetFlags()[7] = true;
}
if (isValidValue(fields()[8], other.error)) {
this.error = data().deepCopy(fields()[8].schema(), other.error);
fieldSetFlags()[8] = true;
}
if (isValidValue(fields()[9], other.counters)) {
this.counters = data().deepCopy(fields()[9].schema(), other.counters);
fieldSetFlags()[9] = true;
}
this.countersBuilder = null;
if (isValidValue(fields()[10], other.clockSplits)) {
this.clockSplits = data().deepCopy(fields()[10].schema(), other.clockSplits);
fieldSetFlags()[10] = true;
}
if (isValidValue(fields()[11], other.cpuUsages)) {
this.cpuUsages = data().deepCopy(fields()[11].schema(), other.cpuUsages);
fieldSetFlags()[11] = true;
}
if (isValidValue(fields()[12], other.vMemKbytes)) {
this.vMemKbytes = data().deepCopy(fields()[12].schema(), other.vMemKbytes);
fieldSetFlags()[12] = true;
}
if (isValidValue(fields()[13], other.physMemKbytes)) {
this.physMemKbytes = data().deepCopy(fields()[13].schema(), other.physMemKbytes);
fieldSetFlags()[13] = true;
}
}
/**
* Gets the value of the 'taskid' field.
* @return The value.
*/
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
* Sets the value of the 'taskid' field.
* @param value The value of 'taskid'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setTaskid(java.lang.CharSequence value) {
validate(fields()[0], value);
this.taskid = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'taskid' field has been set.
* @return True if the 'taskid' field has been set, false otherwise.
*/
public boolean hasTaskid() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'taskid' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearTaskid() {
taskid = null;
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'taskType' field.
* @return The value.
*/
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
* Sets the value of the 'taskType' field.
* @param value The value of 'taskType'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setTaskType(java.lang.CharSequence value) {
validate(fields()[1], value);
this.taskType = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'taskType' field has been set.
* @return True if the 'taskType' field has been set, false otherwise.
*/
public boolean hasTaskType() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'taskType' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearTaskType() {
taskType = null;
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'attemptId' field.
* @return The value.
*/
public java.lang.CharSequence getAttemptId() {
return attemptId;
}
/**
* Sets the value of the 'attemptId' field.
* @param value The value of 'attemptId'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setAttemptId(java.lang.CharSequence value) {
validate(fields()[2], value);
this.attemptId = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'attemptId' field has been set.
* @return True if the 'attemptId' field has been set, false otherwise.
*/
public boolean hasAttemptId() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'attemptId' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearAttemptId() {
attemptId = null;
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'finishTime' field.
* @return The value.
*/
public long getFinishTime() {
return finishTime;
}
/**
* Sets the value of the 'finishTime' field.
* @param value The value of 'finishTime'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setFinishTime(long value) {
validate(fields()[3], value);
this.finishTime = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'finishTime' field has been set.
* @return True if the 'finishTime' field has been set, false otherwise.
*/
public boolean hasFinishTime() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'finishTime' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearFinishTime() {
fieldSetFlags()[3] = false;
return this;
}
/**
* Gets the value of the 'hostname' field.
* @return The value.
*/
public java.lang.CharSequence getHostname() {
return hostname;
}
/**
* Sets the value of the 'hostname' field.
* @param value The value of 'hostname'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setHostname(java.lang.CharSequence value) {
validate(fields()[4], value);
this.hostname = value;
fieldSetFlags()[4] = true;
return this;
}
/**
* Checks whether the 'hostname' field has been set.
* @return True if the 'hostname' field has been set, false otherwise.
*/
public boolean hasHostname() {
return fieldSetFlags()[4];
}
/**
* Clears the value of the 'hostname' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearHostname() {
hostname = null;
fieldSetFlags()[4] = false;
return this;
}
/**
* Gets the value of the 'port' field.
* @return The value.
*/
public int getPort() {
return port;
}
/**
* Sets the value of the 'port' field.
* @param value The value of 'port'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setPort(int value) {
validate(fields()[5], value);
this.port = value;
fieldSetFlags()[5] = true;
return this;
}
/**
* Checks whether the 'port' field has been set.
* @return True if the 'port' field has been set, false otherwise.
*/
public boolean hasPort() {
return fieldSetFlags()[5];
}
/**
* Clears the value of the 'port' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearPort() {
fieldSetFlags()[5] = false;
return this;
}
/**
* Gets the value of the 'rackname' field.
* @return The value.
*/
public java.lang.CharSequence getRackname() {
return rackname;
}
/**
* Sets the value of the 'rackname' field.
* @param value The value of 'rackname'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setRackname(java.lang.CharSequence value) {
validate(fields()[6], value);
this.rackname = value;
fieldSetFlags()[6] = true;
return this;
}
/**
* Checks whether the 'rackname' field has been set.
* @return True if the 'rackname' field has been set, false otherwise.
*/
public boolean hasRackname() {
return fieldSetFlags()[6];
}
/**
* Clears the value of the 'rackname' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearRackname() {
rackname = null;
fieldSetFlags()[6] = false;
return this;
}
/**
* Gets the value of the 'status' field.
* @return The value.
*/
public java.lang.CharSequence getStatus() {
return status;
}
/**
* Sets the value of the 'status' field.
* @param value The value of 'status'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setStatus(java.lang.CharSequence value) {
validate(fields()[7], value);
this.status = value;
fieldSetFlags()[7] = true;
return this;
}
/**
* Checks whether the 'status' field has been set.
* @return True if the 'status' field has been set, false otherwise.
*/
public boolean hasStatus() {
return fieldSetFlags()[7];
}
/**
* Clears the value of the 'status' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearStatus() {
status = null;
fieldSetFlags()[7] = false;
return this;
}
/**
* Gets the value of the 'error' field.
* @return The value.
*/
public java.lang.CharSequence getError() {
return error;
}
/**
* Sets the value of the 'error' field.
* @param value The value of 'error'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setError(java.lang.CharSequence value) {
validate(fields()[8], value);
this.error = value;
fieldSetFlags()[8] = true;
return this;
}
/**
* Checks whether the 'error' field has been set.
* @return True if the 'error' field has been set, false otherwise.
*/
public boolean hasError() {
return fieldSetFlags()[8];
}
/**
* Clears the value of the 'error' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearError() {
error = null;
fieldSetFlags()[8] = false;
return this;
}
/**
* Gets the value of the 'counters' field.
* @return The value.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/**
* Sets the value of the 'counters' field.
* @param value The value of 'counters'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
validate(fields()[9], value);
this.countersBuilder = null;
this.counters = value;
fieldSetFlags()[9] = true;
return this;
}
/**
* Checks whether the 'counters' field has been set.
* @return True if the 'counters' field has been set, false otherwise.
*/
public boolean hasCounters() {
return fieldSetFlags()[9];
}
/**
* Gets the Builder instance for the 'counters' field and creates one if it doesn't exist yet.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder getCountersBuilder() {
if (countersBuilder == null) {
if (hasCounters()) {
setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder(counters));
} else {
setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder());
}
}
return countersBuilder;
}
/**
* Sets the Builder instance for the 'counters' field
* @param value The builder instance that must be set.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder value) {
clearCounters();
countersBuilder = value;
return this;
}
/**
* Checks whether the 'counters' field has an active Builder instance
* @return True if the 'counters' field has an active Builder instance
*/
public boolean hasCountersBuilder() {
return countersBuilder != null;
}
/**
* Clears the value of the 'counters' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearCounters() {
counters = null;
countersBuilder = null;
fieldSetFlags()[9] = false;
return this;
}
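// Usage sketch (illustrative, not generated code; assumes the sibling
// generated JhCounters.Builder exposes the usual fluent setters for its
// 'name' and 'groups' fields): populating the nullable 'counters' field
// through the nested builder instead of a pre-built JhCounters instance.
private Builder countersExample() {
getCountersBuilder()
.setName("example-counters")
.setGroups(java.util.Collections.<org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup>emptyList());
return this; // build() assembles 'counters' from the nested builder
}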
/**
* Gets the value of the 'clockSplits' field.
* @return The value.
*/
public java.util.List<java.lang.Integer> getClockSplits() {
return clockSplits;
}
/**
* Sets the value of the 'clockSplits' field.
* @param value The value of 'clockSplits'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setClockSplits(java.util.List<java.lang.Integer> value) {
validate(fields()[10], value);
this.clockSplits = value;
fieldSetFlags()[10] = true;
return this;
}
/**
* Checks whether the 'clockSplits' field has been set.
* @return True if the 'clockSplits' field has been set, false otherwise.
*/
public boolean hasClockSplits() {
return fieldSetFlags()[10];
}
/**
* Clears the value of the 'clockSplits' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearClockSplits() {
clockSplits = null;
fieldSetFlags()[10] = false;
return this;
}
/**
* Gets the value of the 'cpuUsages' field.
* @return The value.
*/
public java.util.List<java.lang.Integer> getCpuUsages() {
return cpuUsages;
}
/**
* Sets the value of the 'cpuUsages' field.
* @param value The value of 'cpuUsages'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setCpuUsages(java.util.List<java.lang.Integer> value) {
validate(fields()[11], value);
this.cpuUsages = value;
fieldSetFlags()[11] = true;
return this;
}
/**
* Checks whether the 'cpuUsages' field has been set.
* @return True if the 'cpuUsages' field has been set, false otherwise.
*/
public boolean hasCpuUsages() {
return fieldSetFlags()[11];
}
/**
* Clears the value of the 'cpuUsages' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearCpuUsages() {
cpuUsages = null;
fieldSetFlags()[11] = false;
return this;
}
/**
* Gets the value of the 'vMemKbytes' field.
* @return The value.
*/
public java.util.List<java.lang.Integer> getVMemKbytes() {
return vMemKbytes;
}
/**
* Sets the value of the 'vMemKbytes' field.
* @param value The value of 'vMemKbytes'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setVMemKbytes(java.util.List<java.lang.Integer> value) {
validate(fields()[12], value);
this.vMemKbytes = value;
fieldSetFlags()[12] = true;
return this;
}
/**
* Checks whether the 'vMemKbytes' field has been set.
* @return True if the 'vMemKbytes' field has been set, false otherwise.
*/
public boolean hasVMemKbytes() {
return fieldSetFlags()[12];
}
/**
* Clears the value of the 'vMemKbytes' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearVMemKbytes() {
vMemKbytes = null;
fieldSetFlags()[12] = false;
return this;
}
/**
* Gets the value of the 'physMemKbytes' field.
* @return The value.
*/
public java.util.List<java.lang.Integer> getPhysMemKbytes() {
return physMemKbytes;
}
/**
* Sets the value of the 'physMemKbytes' field.
* @param value The value of 'physMemKbytes'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
validate(fields()[13], value);
this.physMemKbytes = value;
fieldSetFlags()[13] = true;
return this;
}
/**
* Checks whether the 'physMemKbytes' field has been set.
* @return True if the 'physMemKbytes' field has been set, false otherwise.
*/
public boolean hasPhysMemKbytes() {
return fieldSetFlags()[13];
}
/**
* Clears the value of the 'physMemKbytes' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearPhysMemKbytes() {
physMemKbytes = null;
fieldSetFlags()[13] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public TaskAttemptUnsuccessfulCompletion build() {
try {
TaskAttemptUnsuccessfulCompletion record = new TaskAttemptUnsuccessfulCompletion();
record.taskid = fieldSetFlags()[0] ? this.taskid : (java.lang.CharSequence) defaultValue(fields()[0]);
record.taskType = fieldSetFlags()[1] ? this.taskType : (java.lang.CharSequence) defaultValue(fields()[1]);
record.attemptId = fieldSetFlags()[2] ? this.attemptId : (java.lang.CharSequence) defaultValue(fields()[2]);
record.finishTime = fieldSetFlags()[3] ? this.finishTime : (java.lang.Long) defaultValue(fields()[3]);
record.hostname = fieldSetFlags()[4] ? this.hostname : (java.lang.CharSequence) defaultValue(fields()[4]);
record.port = fieldSetFlags()[5] ? this.port : (java.lang.Integer) defaultValue(fields()[5]);
record.rackname = fieldSetFlags()[6] ? this.rackname : (java.lang.CharSequence) defaultValue(fields()[6]);
record.status = fieldSetFlags()[7] ? this.status : (java.lang.CharSequence) defaultValue(fields()[7]);
record.error = fieldSetFlags()[8] ? this.error : (java.lang.CharSequence) defaultValue(fields()[8]);
if (countersBuilder != null) {
try {
record.counters = this.countersBuilder.build();
} catch (org.apache.hadoop.shaded.org.apache.avro.AvroMissingFieldException e) {
e.addParentField(record.getSchema().getField("counters"));
throw e;
}
} else {
record.counters = fieldSetFlags()[9] ? this.counters : (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters) defaultValue(fields()[9]);
}
record.clockSplits = fieldSetFlags()[10] ? this.clockSplits : (java.util.List<java.lang.Integer>) defaultValue(fields()[10]);
record.cpuUsages = fieldSetFlags()[11] ? this.cpuUsages : (java.util.List<java.lang.Integer>) defaultValue(fields()[11]);
record.vMemKbytes = fieldSetFlags()[12] ? this.vMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[12]);
record.physMemKbytes = fieldSetFlags()[13] ? this.physMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[13]);
return record;
} catch (org.apache.hadoop.shaded.org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.hadoop.shaded.org.apache.avro.io.DatumWriter<TaskAttemptUnsuccessfulCompletion>
WRITER$ = (org.apache.hadoop.shaded.org.apache.avro.io.DatumWriter<TaskAttemptUnsuccessfulCompletion>)MODEL$.createDatumWriter(SCHEMA$);
@Override public void writeExternal(java.io.ObjectOutput out)
throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.hadoop.shaded.org.apache.avro.io.DatumReader<TaskAttemptUnsuccessfulCompletion>
READER$ = (org.apache.hadoop.shaded.org.apache.avro.io.DatumReader<TaskAttemptUnsuccessfulCompletion>)MODEL$.createDatumReader(SCHEMA$);
@Override public void readExternal(java.io.ObjectInput in)
throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
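// Note: the Externalizable hooks above route standard Java serialization
// through Avro's binary codec, so an ObjectOutputStream/ObjectInputStream
// round trip writes this record as Avro binary rather than via default
// Java field serialization.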
@Override protected boolean hasCustomCoders() { return true; }
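// Returning true tells SpecificDatumReader/Writer to use the hand-written
// customEncode/customDecode methods below instead of the generic,
// schema-walking read/write path.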
@Override public void customEncode(org.apache.hadoop.shaded.org.apache.avro.io.Encoder out)
throws java.io.IOException
{
out.writeString(this.taskid);
out.writeString(this.taskType);
out.writeString(this.attemptId);
out.writeLong(this.finishTime);
out.writeString(this.hostname);
out.writeInt(this.port);
out.writeString(this.rackname);
out.writeString(this.status);
out.writeString(this.error);
if (this.counters == null) {
out.writeIndex(0);
out.writeNull();
} else {
out.writeIndex(1);
this.counters.customEncode(out);
}
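// Each array field is block-encoded: the declared size is written first,
// then one startItem()/writeInt() pair per element; the size check after
// each loop guards against the list being mutated mid-write.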
long size0 = this.clockSplits.size();
out.writeArrayStart();
out.setItemCount(size0);
long actualSize0 = 0;
for (java.lang.Integer e0: this.clockSplits) {
actualSize0++;
out.startItem();
out.writeInt(e0);
}
out.writeArrayEnd();
if (actualSize0 != size0)
throw new java.util.ConcurrentModificationException("Array-size written was " + size0 + ", but element count was " + actualSize0 + ".");
long size1 = this.cpuUsages.size();
out.writeArrayStart();
out.setItemCount(size1);
long actualSize1 = 0;
for (java.lang.Integer e1: this.cpuUsages) {
actualSize1++;
out.startItem();
out.writeInt(e1);
}
out.writeArrayEnd();
if (actualSize1 != size1)
throw new java.util.ConcurrentModificationException("Array-size written was " + size1 + ", but element count was " + actualSize1 + ".");
long size2 = this.vMemKbytes.size();
out.writeArrayStart();
out.setItemCount(size2);
long actualSize2 = 0;
for (java.lang.Integer e2: this.vMemKbytes) {
actualSize2++;
out.startItem();
out.writeInt(e2);
}
out.writeArrayEnd();
if (actualSize2 != size2)
throw new java.util.ConcurrentModificationException("Array-size written was " + size2 + ", but element count was " + actualSize2 + ".");
long size3 = this.physMemKbytes.size();
out.writeArrayStart();
out.setItemCount(size3);
long actualSize3 = 0;
for (java.lang.Integer e3: this.physMemKbytes) {
actualSize3++;
out.startItem();
out.writeInt(e3);
}
out.writeArrayEnd();
if (actualSize3 != size3)
throw new java.util.ConcurrentModificationException("Array-size written was " + size3 + ", but element count was " + actualSize3 + ".");
}
@Override public void customDecode(org.apache.hadoop.shaded.org.apache.avro.io.ResolvingDecoder in)
throws java.io.IOException
{
org.apache.hadoop.shaded.org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
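// A null field order means the writer's schema matches SCHEMA$ exactly,
// so the straight-line fast path below applies; otherwise each field is
// dispatched by its position in the resolved schema.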
if (fieldOrder == null) {
this.taskid = in.readString(this.taskid instanceof Utf8 ? (Utf8)this.taskid : null);
this.taskType = in.readString(this.taskType instanceof Utf8 ? (Utf8)this.taskType : null);
this.attemptId = in.readString(this.attemptId instanceof Utf8 ? (Utf8)this.attemptId : null);
this.finishTime = in.readLong();
this.hostname = in.readString(this.hostname instanceof Utf8 ? (Utf8)this.hostname : null);
this.port = in.readInt();
this.rackname = in.readString(this.rackname instanceof Utf8 ? (Utf8)this.rackname : null);
this.status = in.readString(this.status instanceof Utf8 ? (Utf8)this.status : null);
this.error = in.readString(this.error instanceof Utf8 ? (Utf8)this.error : null);
if (in.readIndex() != 1) {
in.readNull();
this.counters = null;
} else {
if (this.counters == null) {
this.counters = new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters();
}
this.counters.customDecode(in);
}
long size0 = in.readArrayStart();
java.util.List<java.lang.Integer> a0 = this.clockSplits;
if (a0 == null) {
a0 = new SpecificData.Array<java.lang.Integer>((int)size0, SCHEMA$.getField("clockSplits").schema());
this.clockSplits = a0;
} else a0.clear();
SpecificData.Array<java.lang.Integer> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a0 : null);
for ( ; 0 < size0; size0 = in.arrayNext()) {
for ( ; size0 != 0; size0--) {
java.lang.Integer e0 = (ga0 != null ? ga0.peek() : null);
e0 = in.readInt();
a0.add(e0);
}
}
long size1 = in.readArrayStart();
java.util.List<java.lang.Integer> a1 = this.cpuUsages;
if (a1 == null) {
a1 = new SpecificData.Array<java.lang.Integer>((int)size1, SCHEMA$.getField("cpuUsages").schema());
this.cpuUsages = a1;
} else a1.clear();
SpecificData.Array<java.lang.Integer> ga1 = (a1 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a1 : null);
for ( ; 0 < size1; size1 = in.arrayNext()) {
for ( ; size1 != 0; size1--) {
java.lang.Integer e1 = (ga1 != null ? ga1.peek() : null);
e1 = in.readInt();
a1.add(e1);
}
}
long size2 = in.readArrayStart();
java.util.List<java.lang.Integer> a2 = this.vMemKbytes;
if (a2 == null) {
a2 = new SpecificData.Array<java.lang.Integer>((int)size2, SCHEMA$.getField("vMemKbytes").schema());
this.vMemKbytes = a2;
} else a2.clear();
SpecificData.Array<java.lang.Integer> ga2 = (a2 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a2 : null);
for ( ; 0 < size2; size2 = in.arrayNext()) {
for ( ; size2 != 0; size2--) {
java.lang.Integer e2 = (ga2 != null ? ga2.peek() : null);
e2 = in.readInt();
a2.add(e2);
}
}
long size3 = in.readArrayStart();
java.util.List<java.lang.Integer> a3 = this.physMemKbytes;
if (a3 == null) {
a3 = new SpecificData.Array<java.lang.Integer>((int)size3, SCHEMA$.getField("physMemKbytes").schema());
this.physMemKbytes = a3;
} else a3.clear();
SpecificData.Array<java.lang.Integer> ga3 = (a3 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a3 : null);
for ( ; 0 < size3; size3 = in.arrayNext()) {
for ( ; size3 != 0; size3--) {
java.lang.Integer e3 = (ga3 != null ? ga3.peek() : null);
e3 = in.readInt();
a3.add(e3);
}
}
} else {
for (int i = 0; i < 14; i++) {
switch (fieldOrder[i].pos()) {
case 0:
this.taskid = in.readString(this.taskid instanceof Utf8 ? (Utf8)this.taskid : null);
break;
case 1:
this.taskType = in.readString(this.taskType instanceof Utf8 ? (Utf8)this.taskType : null);
break;
case 2:
this.attemptId = in.readString(this.attemptId instanceof Utf8 ? (Utf8)this.attemptId : null);
break;
case 3:
this.finishTime = in.readLong();
break;
case 4:
this.hostname = in.readString(this.hostname instanceof Utf8 ? (Utf8)this.hostname : null);
break;
case 5:
this.port = in.readInt();
break;
case 6:
this.rackname = in.readString(this.rackname instanceof Utf8 ? (Utf8)this.rackname : null);
break;
case 7:
this.status = in.readString(this.status instanceof Utf8 ? (Utf8)this.status : null);
break;
case 8:
this.error = in.readString(this.error instanceof Utf8 ? (Utf8)this.error : null);
break;
case 9:
if (in.readIndex() != 1) {
in.readNull();
this.counters = null;
} else {
if (this.counters == null) {
this.counters = new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters();
}
this.counters.customDecode(in);
}
break;
case 10:
long size0 = in.readArrayStart();
java.util.List<java.lang.Integer> a0 = this.clockSplits;
if (a0 == null) {
a0 = new SpecificData.Array<java.lang.Integer>((int)size0, SCHEMA$.getField("clockSplits").schema());
this.clockSplits = a0;
} else a0.clear();
SpecificData.Array<java.lang.Integer> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a0 : null);
for ( ; 0 < size0; size0 = in.arrayNext()) {
for ( ; size0 != 0; size0--) {
java.lang.Integer e0 = (ga0 != null ? ga0.peek() : null);
e0 = in.readInt();
a0.add(e0);
}
}
break;
case 11:
long size1 = in.readArrayStart();
java.util.List<java.lang.Integer> a1 = this.cpuUsages;
if (a1 == null) {
a1 = new SpecificData.Array<java.lang.Integer>((int)size1, SCHEMA$.getField("cpuUsages").schema());
this.cpuUsages = a1;
} else a1.clear();
SpecificData.Array<java.lang.Integer> ga1 = (a1 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a1 : null);
for ( ; 0 < size1; size1 = in.arrayNext()) {
for ( ; size1 != 0; size1--) {
java.lang.Integer e1 = (ga1 != null ? ga1.peek() : null);
e1 = in.readInt();
a1.add(e1);
}
}
break;
case 12:
long size2 = in.readArrayStart();
java.util.List<java.lang.Integer> a2 = this.vMemKbytes;
if (a2 == null) {
a2 = new SpecificData.Array<java.lang.Integer>((int)size2, SCHEMA$.getField("vMemKbytes").schema());
this.vMemKbytes = a2;
} else a2.clear();
SpecificData.Array<java.lang.Integer> ga2 = (a2 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a2 : null);
for ( ; 0 < size2; size2 = in.arrayNext()) {
for ( ; size2 != 0; size2--) {
java.lang.Integer e2 = (ga2 != null ? ga2.peek() : null);
e2 = in.readInt();
a2.add(e2);
}
}
break;
case 13:
long size3 = in.readArrayStart();
java.util.List<java.lang.Integer> a3 = this.physMemKbytes;
if (a3 == null) {
a3 = new SpecificData.Array<java.lang.Integer>((int)size3, SCHEMA$.getField("physMemKbytes").schema());
this.physMemKbytes = a3;
} else a3.clear();
SpecificData.Array<java.lang.Integer> ga3 = (a3 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a3 : null);
for ( ; 0 < size3; size3 = in.arrayNext()) {
for ( ; size3 != 0; size3--) {
java.lang.Integer e3 = (ga3 != null ? ga3.peek() : null);
e3 = in.readInt();
a3.add(e3);
}
}
break;
default:
throw new java.io.IOException("Corrupt ResolvingDecoder.");
}
}
}
}
}