
// org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished — Maven / Gradle / Ivy (artifact-listing residue, kept as a comment)
/**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory;
import org.apache.hadoop.shaded.org.apache.avro.generic.GenericArray;
import org.apache.hadoop.shaded.org.apache.avro.specific.SpecificData;
import org.apache.hadoop.shaded.org.apache.avro.util.Utf8;
import org.apache.hadoop.shaded.org.apache.avro.message.BinaryMessageEncoder;
import org.apache.hadoop.shaded.org.apache.avro.message.BinaryMessageDecoder;
import org.apache.hadoop.shaded.org.apache.avro.message.SchemaStore;
@org.apache.hadoop.shaded.org.apache.avro.specific.AvroGenerated
public class ReduceAttemptFinished extends org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecordBase implements org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 7874647271481594404L;
// The supertype, interface and Schema.Parser references previously used the unshaded
// org.apache.avro.* names while the imports are shaded; all must use the relocated packages.
// The schema JSON literal below is runtime data and is kept byte-identical.
public static final org.apache.hadoop.shaded.org.apache.avro.Schema SCHEMA$ = new org.apache.hadoop.shaded.org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ReduceAttemptFinished\",\"namespace\":\"org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory\",\"fields\":[{\"name\":\"taskid\",\"type\":\"string\"},{\"name\":\"attemptId\",\"type\":\"string\"},{\"name\":\"taskType\",\"type\":\"string\"},{\"name\":\"taskStatus\",\"type\":\"string\"},{\"name\":\"shuffleFinishTime\",\"type\":\"long\"},{\"name\":\"sortFinishTime\",\"type\":\"long\"},{\"name\":\"finishTime\",\"type\":\"long\"},{\"name\":\"hostname\",\"type\":\"string\"},{\"name\":\"port\",\"type\":\"int\"},{\"name\":\"rackname\",\"type\":\"string\"},{\"name\":\"state\",\"type\":\"string\"},{\"name\":\"counters\",\"type\":{\"type\":\"record\",\"name\":\"JhCounters\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"groups\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounterGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"counts\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounter\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"long\"}]}}}]}}}]}},{\"name\":\"clockSplits\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"cpuUsages\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"vMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"physMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}}]}");
public static org.apache.hadoop.shaded.org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static SpecificData MODEL$ = new SpecificData();
// Parameterized encoder/decoder: the raw types used previously produced unchecked
// warnings and an untyped decode() result.
private static final BinaryMessageEncoder<ReduceAttemptFinished> ENCODER =
    new BinaryMessageEncoder<ReduceAttemptFinished>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<ReduceAttemptFinished> DECODER =
    new BinaryMessageDecoder<ReduceAttemptFinished>(MODEL$, SCHEMA$);
/**
 * Return the BinaryMessageEncoder instance used by this class.
 * @return the message encoder used by this class
 */
public static BinaryMessageEncoder<ReduceAttemptFinished> getEncoder() {
  return ENCODER;
}
/**
 * Return the BinaryMessageDecoder instance used by this class.
 * @return the message decoder used by this class
 */
public static BinaryMessageDecoder<ReduceAttemptFinished> getDecoder() {
  return DECODER;
}
/**
 * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
 * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
 * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
 */
public static BinaryMessageDecoder<ReduceAttemptFinished> createDecoder(SchemaStore resolver) {
  // A SchemaStore-backed decoder can resolve writer schemas by fingerprint at read time.
  return new BinaryMessageDecoder<ReduceAttemptFinished>(MODEL$, SCHEMA$, resolver);
}
/**
 * Serializes this ReduceAttemptFinished to a ByteBuffer.
 * @return a buffer holding the serialized data for this instance
 * @throws java.io.IOException if this instance could not be serialized
 */
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
// Delegates to the class-wide single-object binary encoder built from SCHEMA$.
return ENCODER.encode(this);
}
/**
 * Deserializes a ReduceAttemptFinished from a ByteBuffer.
 * @param b a byte buffer holding serialized data for an instance of this class
 * @return a ReduceAttemptFinished instance decoded from the given buffer
 * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
 */
public static ReduceAttemptFinished fromByteBuffer(
java.nio.ByteBuffer b) throws java.io.IOException {
// Inverse of toByteBuffer(): decodes via the class-wide binary decoder.
return DECODER.decode(b);
}
// Public fields are generated for Avro reflection; application code should prefer the
// get*/set* accessors (hence @Deprecated).
@Deprecated public java.lang.CharSequence taskid;
@Deprecated public java.lang.CharSequence attemptId;
@Deprecated public java.lang.CharSequence taskType;
@Deprecated public java.lang.CharSequence taskStatus;
@Deprecated public long shuffleFinishTime;
@Deprecated public long sortFinishTime;
@Deprecated public long finishTime;
@Deprecated public java.lang.CharSequence hostname;
@Deprecated public int port;
@Deprecated public java.lang.CharSequence rackname;
@Deprecated public java.lang.CharSequence state;
@Deprecated public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
// These were raw java.util.List; SCHEMA$ declares them as arrays of Avro "int".
@Deprecated public java.util.List<java.lang.Integer> clockSplits;
@Deprecated public java.util.List<java.lang.Integer> cpuUsages;
@Deprecated public java.util.List<java.lang.Integer> vMemKbytes;
@Deprecated public java.util.List<java.lang.Integer> physMemKbytes;
/**
 * Default constructor.  Note that this does not initialize fields
 * to their default values from the schema.  If that is desired then
 * one should use <code>newBuilder()</code>.
 */
public ReduceAttemptFinished() {}
/**
 * All-args constructor.
 * @param taskid The new value for taskid
 * @param attemptId The new value for attemptId
 * @param taskType The new value for taskType
 * @param taskStatus The new value for taskStatus
 * @param shuffleFinishTime The new value for shuffleFinishTime
 * @param sortFinishTime The new value for sortFinishTime
 * @param finishTime The new value for finishTime
 * @param hostname The new value for hostname
 * @param port The new value for port
 * @param rackname The new value for rackname
 * @param state The new value for state
 * @param counters The new value for counters
 * @param clockSplits The new value for clockSplits
 * @param cpuUsages The new value for cpuUsages
 * @param vMemKbytes The new value for vMemKbytes
 * @param physMemKbytes The new value for physMemKbytes
 */
// Fixed: the 'counters' parameter was typed with the UNSHADED JhCounters, which cannot be
// assigned to the shaded field declared above; list parameters are also genericized.
public ReduceAttemptFinished(java.lang.CharSequence taskid, java.lang.CharSequence attemptId, java.lang.CharSequence taskType, java.lang.CharSequence taskStatus, java.lang.Long shuffleFinishTime, java.lang.Long sortFinishTime, java.lang.Long finishTime, java.lang.CharSequence hostname, java.lang.Integer port, java.lang.CharSequence rackname, java.lang.CharSequence state, org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters, java.util.List<java.lang.Integer> clockSplits, java.util.List<java.lang.Integer> cpuUsages, java.util.List<java.lang.Integer> vMemKbytes, java.util.List<java.lang.Integer> physMemKbytes) {
this.taskid = taskid;
this.attemptId = attemptId;
this.taskType = taskType;
this.taskStatus = taskStatus;
this.shuffleFinishTime = shuffleFinishTime;
this.sortFinishTime = sortFinishTime;
this.finishTime = finishTime;
this.hostname = hostname;
this.port = port;
this.rackname = rackname;
this.state = state;
this.counters = counters;
this.clockSplits = clockSplits;
this.cpuUsages = cpuUsages;
this.vMemKbytes = vMemKbytes;
this.physMemKbytes = physMemKbytes;
}
public org.apache.hadoop.shaded.org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
public org.apache.hadoop.shaded.org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter.  Applications should not call.
// Returns the field value for the given schema position (order matches SCHEMA$).
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return taskid;
case 1: return attemptId;
case 2: return taskType;
case 3: return taskStatus;
case 4: return shuffleFinishTime;
case 5: return sortFinishTime;
case 6: return finishTime;
case 7: return hostname;
case 8: return port;
case 9: return rackname;
case 10: return state;
case 11: return counters;
case 12: return clockSplits;
case 13: return cpuUsages;
case 14: return vMemKbytes;
case 15: return physMemKbytes;
// Fixed: AvroRuntimeException must come from the shaded package, matching the imports.
default: throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader.  Applications should not call.
// Stores a decoded value into the field at the given schema position.
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: taskid = (java.lang.CharSequence)value$; break;
case 1: attemptId = (java.lang.CharSequence)value$; break;
case 2: taskType = (java.lang.CharSequence)value$; break;
case 3: taskStatus = (java.lang.CharSequence)value$; break;
case 4: shuffleFinishTime = (java.lang.Long)value$; break;
case 5: sortFinishTime = (java.lang.Long)value$; break;
case 6: finishTime = (java.lang.Long)value$; break;
case 7: hostname = (java.lang.CharSequence)value$; break;
case 8: port = (java.lang.Integer)value$; break;
case 9: rackname = (java.lang.CharSequence)value$; break;
case 10: state = (java.lang.CharSequence)value$; break;
case 11: counters = (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters)value$; break;
case 12: clockSplits = (java.util.List<java.lang.Integer>)value$; break;
case 13: cpuUsages = (java.util.List<java.lang.Integer>)value$; break;
case 14: vMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
case 15: physMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
// Fixed: AvroRuntimeException must come from the shaded package, matching the imports.
default: throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Field accessors.  NOTE(review): string-typed fields are CharSequence because the Avro
// runtime may supply Utf8 instances rather than java.lang.String — callers should not
// assume String.  The four per-interval list accessors are genericized to
// List<Integer> to match the schema's arrays of "int" (they were raw in the original).
/**
 * Gets the value of the 'taskid' field.
 * @return The value of the 'taskid' field.
 */
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
 * Sets the value of the 'taskid' field.
 * @param value the value to set.
 */
public void setTaskid(java.lang.CharSequence value) {
this.taskid = value;
}
/**
 * Gets the value of the 'attemptId' field.
 * @return The value of the 'attemptId' field.
 */
public java.lang.CharSequence getAttemptId() {
return attemptId;
}
/**
 * Sets the value of the 'attemptId' field.
 * @param value the value to set.
 */
public void setAttemptId(java.lang.CharSequence value) {
this.attemptId = value;
}
/**
 * Gets the value of the 'taskType' field.
 * @return The value of the 'taskType' field.
 */
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
 * Sets the value of the 'taskType' field.
 * @param value the value to set.
 */
public void setTaskType(java.lang.CharSequence value) {
this.taskType = value;
}
/**
 * Gets the value of the 'taskStatus' field.
 * @return The value of the 'taskStatus' field.
 */
public java.lang.CharSequence getTaskStatus() {
return taskStatus;
}
/**
 * Sets the value of the 'taskStatus' field.
 * @param value the value to set.
 */
public void setTaskStatus(java.lang.CharSequence value) {
this.taskStatus = value;
}
/**
 * Gets the value of the 'shuffleFinishTime' field.
 * @return The value of the 'shuffleFinishTime' field.
 */
public long getShuffleFinishTime() {
return shuffleFinishTime;
}
/**
 * Sets the value of the 'shuffleFinishTime' field.
 * @param value the value to set.
 */
public void setShuffleFinishTime(long value) {
this.shuffleFinishTime = value;
}
/**
 * Gets the value of the 'sortFinishTime' field.
 * @return The value of the 'sortFinishTime' field.
 */
public long getSortFinishTime() {
return sortFinishTime;
}
/**
 * Sets the value of the 'sortFinishTime' field.
 * @param value the value to set.
 */
public void setSortFinishTime(long value) {
this.sortFinishTime = value;
}
/**
 * Gets the value of the 'finishTime' field.
 * @return The value of the 'finishTime' field.
 */
public long getFinishTime() {
return finishTime;
}
/**
 * Sets the value of the 'finishTime' field.
 * @param value the value to set.
 */
public void setFinishTime(long value) {
this.finishTime = value;
}
/**
 * Gets the value of the 'hostname' field.
 * @return The value of the 'hostname' field.
 */
public java.lang.CharSequence getHostname() {
return hostname;
}
/**
 * Sets the value of the 'hostname' field.
 * @param value the value to set.
 */
public void setHostname(java.lang.CharSequence value) {
this.hostname = value;
}
/**
 * Gets the value of the 'port' field.
 * @return The value of the 'port' field.
 */
public int getPort() {
return port;
}
/**
 * Sets the value of the 'port' field.
 * @param value the value to set.
 */
public void setPort(int value) {
this.port = value;
}
/**
 * Gets the value of the 'rackname' field.
 * @return The value of the 'rackname' field.
 */
public java.lang.CharSequence getRackname() {
return rackname;
}
/**
 * Sets the value of the 'rackname' field.
 * @param value the value to set.
 */
public void setRackname(java.lang.CharSequence value) {
this.rackname = value;
}
/**
 * Gets the value of the 'state' field.
 * @return The value of the 'state' field.
 */
public java.lang.CharSequence getState() {
return state;
}
/**
 * Sets the value of the 'state' field.
 * @param value the value to set.
 */
public void setState(java.lang.CharSequence value) {
this.state = value;
}
/**
 * Gets the value of the 'counters' field.
 * @return The value of the 'counters' field.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/**
 * Sets the value of the 'counters' field.
 * @param value the value to set.
 */
public void setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
this.counters = value;
}
/**
 * Gets the value of the 'clockSplits' field.
 * @return The value of the 'clockSplits' field.
 */
public java.util.List<java.lang.Integer> getClockSplits() {
return clockSplits;
}
/**
 * Sets the value of the 'clockSplits' field.
 * @param value the value to set.
 */
public void setClockSplits(java.util.List<java.lang.Integer> value) {
this.clockSplits = value;
}
/**
 * Gets the value of the 'cpuUsages' field.
 * @return The value of the 'cpuUsages' field.
 */
public java.util.List<java.lang.Integer> getCpuUsages() {
return cpuUsages;
}
/**
 * Sets the value of the 'cpuUsages' field.
 * @param value the value to set.
 */
public void setCpuUsages(java.util.List<java.lang.Integer> value) {
this.cpuUsages = value;
}
/**
 * Gets the value of the 'vMemKbytes' field.
 * @return The value of the 'vMemKbytes' field.
 */
public java.util.List<java.lang.Integer> getVMemKbytes() {
return vMemKbytes;
}
/**
 * Sets the value of the 'vMemKbytes' field.
 * @param value the value to set.
 */
public void setVMemKbytes(java.util.List<java.lang.Integer> value) {
this.vMemKbytes = value;
}
/**
 * Gets the value of the 'physMemKbytes' field.
 * @return The value of the 'physMemKbytes' field.
 */
public java.util.List<java.lang.Integer> getPhysMemKbytes() {
return physMemKbytes;
}
/**
 * Sets the value of the 'physMemKbytes' field.
 * @param value the value to set.
 */
public void setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
this.physMemKbytes = value;
}
/**
 * Creates a new ReduceAttemptFinished RecordBuilder.
 * @return A new ReduceAttemptFinished RecordBuilder
 */
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder newBuilder() {
// Fixed: the constructed Builder must come from the shaded package to match the return type.
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder();
}
/**
 * Creates a new ReduceAttemptFinished RecordBuilder by copying an existing Builder.
 * @param other The existing builder to copy.
 * @return A new ReduceAttemptFinished RecordBuilder
 */
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder other) {
// Fixed: both branches constructed the UNSHADED Builder, which does not match the
// shaded return type; a null 'other' yields a fresh, empty builder.
if (other == null) {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder();
} else {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder(other);
}
}
/**
 * Creates a new ReduceAttemptFinished RecordBuilder by copying an existing ReduceAttemptFinished instance.
 * @param other The existing instance to copy.
 * @return A new ReduceAttemptFinished RecordBuilder
 */
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished other) {
// Fixed: both branches constructed the UNSHADED Builder, which does not match the
// shaded return type; a null 'other' yields a fresh, empty builder.
if (other == null) {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder();
} else {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder(other);
}
}
/**
 * RecordBuilder for ReduceAttemptFinished instances.
 */
@org.apache.hadoop.shaded.org.apache.avro.specific.AvroGenerated
public static class Builder extends org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecordBuilderBase<Builder>
implements org.apache.hadoop.shaded.org.apache.avro.data.RecordBuilder<ReduceAttemptFinished> {
// Fixed: the supertype and interface previously used unshaded, raw org.apache.avro types;
// they are now shaded and parameterized.
private java.lang.CharSequence taskid;
private java.lang.CharSequence attemptId;
private java.lang.CharSequence taskType;
private java.lang.CharSequence taskStatus;
private long shuffleFinishTime;
private long sortFinishTime;
private long finishTime;
private java.lang.CharSequence hostname;
private int port;
private java.lang.CharSequence rackname;
private java.lang.CharSequence state;
private org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
private org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder countersBuilder;
// Previously raw java.util.List; the schema declares arrays of Avro "int".
private java.util.List<java.lang.Integer> clockSplits;
private java.util.List<java.lang.Integer> cpuUsages;
private java.util.List<java.lang.Integer> vMemKbytes;
private java.util.List<java.lang.Integer> physMemKbytes;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$);
}
/**
 * Creates a Builder by copying an existing Builder.
 * Each valid field is deep-copied and its "set" flag mirrored from the source builder.
 * @param other The existing Builder to copy.
 */
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder other) {
super(other);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.attemptId)) {
this.attemptId = data().deepCopy(fields()[1].schema(), other.attemptId);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.taskType)) {
this.taskType = data().deepCopy(fields()[2].schema(), other.taskType);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.taskStatus)) {
this.taskStatus = data().deepCopy(fields()[3].schema(), other.taskStatus);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
if (isValidValue(fields()[4], other.shuffleFinishTime)) {
this.shuffleFinishTime = data().deepCopy(fields()[4].schema(), other.shuffleFinishTime);
fieldSetFlags()[4] = other.fieldSetFlags()[4];
}
if (isValidValue(fields()[5], other.sortFinishTime)) {
this.sortFinishTime = data().deepCopy(fields()[5].schema(), other.sortFinishTime);
fieldSetFlags()[5] = other.fieldSetFlags()[5];
}
if (isValidValue(fields()[6], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[6].schema(), other.finishTime);
fieldSetFlags()[6] = other.fieldSetFlags()[6];
}
if (isValidValue(fields()[7], other.hostname)) {
this.hostname = data().deepCopy(fields()[7].schema(), other.hostname);
fieldSetFlags()[7] = other.fieldSetFlags()[7];
}
if (isValidValue(fields()[8], other.port)) {
this.port = data().deepCopy(fields()[8].schema(), other.port);
fieldSetFlags()[8] = other.fieldSetFlags()[8];
}
if (isValidValue(fields()[9], other.rackname)) {
this.rackname = data().deepCopy(fields()[9].schema(), other.rackname);
fieldSetFlags()[9] = other.fieldSetFlags()[9];
}
if (isValidValue(fields()[10], other.state)) {
this.state = data().deepCopy(fields()[10].schema(), other.state);
fieldSetFlags()[10] = other.fieldSetFlags()[10];
}
if (isValidValue(fields()[11], other.counters)) {
this.counters = data().deepCopy(fields()[11].schema(), other.counters);
fieldSetFlags()[11] = other.fieldSetFlags()[11];
}
if (other.hasCountersBuilder()) {
// Fixed: JhCounters.newBuilder was referenced through the UNSHADED package.
this.countersBuilder = org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder(other.getCountersBuilder());
}
if (isValidValue(fields()[12], other.clockSplits)) {
this.clockSplits = data().deepCopy(fields()[12].schema(), other.clockSplits);
fieldSetFlags()[12] = other.fieldSetFlags()[12];
}
if (isValidValue(fields()[13], other.cpuUsages)) {
this.cpuUsages = data().deepCopy(fields()[13].schema(), other.cpuUsages);
fieldSetFlags()[13] = other.fieldSetFlags()[13];
}
if (isValidValue(fields()[14], other.vMemKbytes)) {
this.vMemKbytes = data().deepCopy(fields()[14].schema(), other.vMemKbytes);
fieldSetFlags()[14] = other.fieldSetFlags()[14];
}
if (isValidValue(fields()[15], other.physMemKbytes)) {
this.physMemKbytes = data().deepCopy(fields()[15].schema(), other.physMemKbytes);
fieldSetFlags()[15] = other.fieldSetFlags()[15];
}
}
/**
 * Creates a Builder by copying an existing ReduceAttemptFinished instance
 * Each valid field of the record is deep-copied and marked as explicitly set.
 * @param other The existing instance to copy.
 */
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished other) {
super(SCHEMA$);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.attemptId)) {
this.attemptId = data().deepCopy(fields()[1].schema(), other.attemptId);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.taskType)) {
this.taskType = data().deepCopy(fields()[2].schema(), other.taskType);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.taskStatus)) {
this.taskStatus = data().deepCopy(fields()[3].schema(), other.taskStatus);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.shuffleFinishTime)) {
this.shuffleFinishTime = data().deepCopy(fields()[4].schema(), other.shuffleFinishTime);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.sortFinishTime)) {
this.sortFinishTime = data().deepCopy(fields()[5].schema(), other.sortFinishTime);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[6].schema(), other.finishTime);
fieldSetFlags()[6] = true;
}
if (isValidValue(fields()[7], other.hostname)) {
this.hostname = data().deepCopy(fields()[7].schema(), other.hostname);
fieldSetFlags()[7] = true;
}
if (isValidValue(fields()[8], other.port)) {
this.port = data().deepCopy(fields()[8].schema(), other.port);
fieldSetFlags()[8] = true;
}
if (isValidValue(fields()[9], other.rackname)) {
this.rackname = data().deepCopy(fields()[9].schema(), other.rackname);
fieldSetFlags()[9] = true;
}
if (isValidValue(fields()[10], other.state)) {
this.state = data().deepCopy(fields()[10].schema(), other.state);
fieldSetFlags()[10] = true;
}
if (isValidValue(fields()[11], other.counters)) {
this.counters = data().deepCopy(fields()[11].schema(), other.counters);
fieldSetFlags()[11] = true;
}
// A materialized record carries no in-progress nested builder, so none is copied.
this.countersBuilder = null;
if (isValidValue(fields()[12], other.clockSplits)) {
this.clockSplits = data().deepCopy(fields()[12].schema(), other.clockSplits);
fieldSetFlags()[12] = true;
}
if (isValidValue(fields()[13], other.cpuUsages)) {
this.cpuUsages = data().deepCopy(fields()[13].schema(), other.cpuUsages);
fieldSetFlags()[13] = true;
}
if (isValidValue(fields()[14], other.vMemKbytes)) {
this.vMemKbytes = data().deepCopy(fields()[14].schema(), other.vMemKbytes);
fieldSetFlags()[14] = true;
}
if (isValidValue(fields()[15], other.physMemKbytes)) {
this.physMemKbytes = data().deepCopy(fields()[15].schema(), other.physMemKbytes);
fieldSetFlags()[15] = true;
}
}
/**
* Gets the value of the 'taskid' field.
* @return The value.
*/
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
* Sets the value of the 'taskid' field.
* @param value The value of 'taskid'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setTaskid(java.lang.CharSequence value) {
validate(fields()[0], value);
this.taskid = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'taskid' field has been set.
* @return True if the 'taskid' field has been set, false otherwise.
*/
public boolean hasTaskid() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'taskid' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearTaskid() {
taskid = null;
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'attemptId' field.
* @return The value.
*/
public java.lang.CharSequence getAttemptId() {
return attemptId;
}
/**
* Sets the value of the 'attemptId' field.
* @param value The value of 'attemptId'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setAttemptId(java.lang.CharSequence value) {
validate(fields()[1], value);
this.attemptId = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'attemptId' field has been set.
* @return True if the 'attemptId' field has been set, false otherwise.
*/
public boolean hasAttemptId() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'attemptId' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearAttemptId() {
attemptId = null;
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'taskType' field.
* @return The value.
*/
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
* Sets the value of the 'taskType' field.
* @param value The value of 'taskType'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setTaskType(java.lang.CharSequence value) {
validate(fields()[2], value);
this.taskType = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'taskType' field has been set.
* @return True if the 'taskType' field has been set, false otherwise.
*/
public boolean hasTaskType() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'taskType' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearTaskType() {
taskType = null;
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'taskStatus' field.
* @return The value.
*/
public java.lang.CharSequence getTaskStatus() {
return taskStatus;
}
/**
* Sets the value of the 'taskStatus' field.
* @param value The value of 'taskStatus'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setTaskStatus(java.lang.CharSequence value) {
validate(fields()[3], value);
this.taskStatus = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'taskStatus' field has been set.
* @return True if the 'taskStatus' field has been set, false otherwise.
*/
public boolean hasTaskStatus() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'taskStatus' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearTaskStatus() {
taskStatus = null;
fieldSetFlags()[3] = false;
return this;
}
/**
* Gets the value of the 'shuffleFinishTime' field.
* @return The value.
*/
public long getShuffleFinishTime() {
return shuffleFinishTime;
}
/**
* Sets the value of the 'shuffleFinishTime' field.
* @param value The value of 'shuffleFinishTime'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setShuffleFinishTime(long value) {
validate(fields()[4], value);
this.shuffleFinishTime = value;
fieldSetFlags()[4] = true;
return this;
}
/**
* Checks whether the 'shuffleFinishTime' field has been set.
* @return True if the 'shuffleFinishTime' field has been set, false otherwise.
*/
public boolean hasShuffleFinishTime() {
return fieldSetFlags()[4];
}
/**
* Clears the value of the 'shuffleFinishTime' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearShuffleFinishTime() {
fieldSetFlags()[4] = false;
return this;
}
/**
* Gets the value of the 'sortFinishTime' field.
* @return The value.
*/
public long getSortFinishTime() {
return sortFinishTime;
}
/**
* Sets the value of the 'sortFinishTime' field.
* @param value The value of 'sortFinishTime'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setSortFinishTime(long value) {
validate(fields()[5], value);
this.sortFinishTime = value;
fieldSetFlags()[5] = true;
return this;
}
/**
* Checks whether the 'sortFinishTime' field has been set.
* @return True if the 'sortFinishTime' field has been set, false otherwise.
*/
public boolean hasSortFinishTime() {
return fieldSetFlags()[5];
}
/**
* Clears the value of the 'sortFinishTime' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearSortFinishTime() {
fieldSetFlags()[5] = false;
return this;
}
/**
* Gets the value of the 'finishTime' field.
* @return The value.
*/
public long getFinishTime() {
return finishTime;
}
/**
* Sets the value of the 'finishTime' field.
* @param value The value of 'finishTime'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setFinishTime(long value) {
validate(fields()[6], value);
this.finishTime = value;
fieldSetFlags()[6] = true;
return this;
}
/**
* Checks whether the 'finishTime' field has been set.
* @return True if the 'finishTime' field has been set, false otherwise.
*/
public boolean hasFinishTime() {
return fieldSetFlags()[6];
}
/**
* Clears the value of the 'finishTime' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearFinishTime() {
fieldSetFlags()[6] = false;
return this;
}
/**
* Gets the value of the 'hostname' field.
* @return The value.
*/
public java.lang.CharSequence getHostname() {
return hostname;
}
/**
* Sets the value of the 'hostname' field.
* @param value The value of 'hostname'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setHostname(java.lang.CharSequence value) {
validate(fields()[7], value);
this.hostname = value;
fieldSetFlags()[7] = true;
return this;
}
/**
* Checks whether the 'hostname' field has been set.
* @return True if the 'hostname' field has been set, false otherwise.
*/
public boolean hasHostname() {
return fieldSetFlags()[7];
}
/**
* Clears the value of the 'hostname' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearHostname() {
hostname = null;
fieldSetFlags()[7] = false;
return this;
}
/**
* Gets the value of the 'port' field.
* @return The value.
*/
public int getPort() {
return port;
}
/**
* Sets the value of the 'port' field.
* @param value The value of 'port'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setPort(int value) {
validate(fields()[8], value);
this.port = value;
fieldSetFlags()[8] = true;
return this;
}
/**
* Checks whether the 'port' field has been set.
* @return True if the 'port' field has been set, false otherwise.
*/
public boolean hasPort() {
return fieldSetFlags()[8];
}
/**
* Clears the value of the 'port' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearPort() {
fieldSetFlags()[8] = false;
return this;
}
/**
* Gets the value of the 'rackname' field.
* @return The value.
*/
public java.lang.CharSequence getRackname() {
return rackname;
}
/**
* Sets the value of the 'rackname' field.
* @param value The value of 'rackname'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setRackname(java.lang.CharSequence value) {
validate(fields()[9], value);
this.rackname = value;
fieldSetFlags()[9] = true;
return this;
}
/**
* Checks whether the 'rackname' field has been set.
* @return True if the 'rackname' field has been set, false otherwise.
*/
public boolean hasRackname() {
// Set-flag for field index 9 ('rackname').
return fieldSetFlags()[9];
}
/**
* Clears the value of the 'rackname' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearRackname() {
rackname = null;
// build() will now substitute the schema default for field 9.
fieldSetFlags()[9] = false;
return this;
}
/**
* Returns the current value of the 'state' field held by this builder.
* @return the builder's 'state' value.
*/
public java.lang.CharSequence getState() {
return this.state;
}
/**
* Sets the value of the 'state' field.
* @param value The value of 'state'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setState(java.lang.CharSequence value) {
// Validate against the 'state' field schema (field index 10) before storing.
validate(fields()[10], value);
this.state = value;
fieldSetFlags()[10] = true;
return this;
}
/**
* Checks whether the 'state' field has been set.
* @return True if the 'state' field has been set, false otherwise.
*/
public boolean hasState() {
// Set-flag for field index 10 ('state').
return fieldSetFlags()[10];
}
/**
* Clears the value of the 'state' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearState() {
state = null;
// build() will now substitute the schema default for field 10.
fieldSetFlags()[10] = false;
return this;
}
/**
* Returns the current value of the 'counters' field held by this builder.
* @return the builder's 'counters' record.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return this.counters;
}
/**
* Sets the value of the 'counters' field.
* @param value The value of 'counters'.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
validate(fields()[11], value);
// Setting a concrete value discards any pending sub-builder, since build()
// prefers countersBuilder over counters when both are present.
this.countersBuilder = null;
this.counters = value;
fieldSetFlags()[11] = true;
return this;
}
/**
* Checks whether the 'counters' field has been set.
* @return True if the 'counters' field has been set, false otherwise.
*/
public boolean hasCounters() {
// Set-flag for field index 11 ('counters'); a pending sub-builder does not set this.
return fieldSetFlags()[11];
}
/**
* Gets the Builder instance for the 'counters' field and creates one if it doesn't exist yet.
* @return The JhCounters.Builder for the 'counters' field (never null).
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder getCountersBuilder() {
if (countersBuilder == null) {
// Lazily create the sub-builder, seeding it from an already-set value if present.
if (hasCounters()) {
setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder(counters));
} else {
setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.newBuilder());
}
}
return countersBuilder;
}
/**
* Sets the Builder instance for the 'counters' field
* @param value The builder instance that must be set.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setCountersBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters.Builder value) {
// Drop any directly-set value/flag first; the sub-builder now owns field 11.
clearCounters();
countersBuilder = value;
return this;
}
/**
* Reports whether a nested Builder is currently attached for the 'counters' field.
* @return true when a 'counters' sub-builder exists, false otherwise.
*/
public boolean hasCountersBuilder() {
return null != countersBuilder;
}
/**
* Clears the value of the 'counters' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearCounters() {
// Clear both the direct value and any pending sub-builder.
counters = null;
countersBuilder = null;
fieldSetFlags()[11] = false;
return this;
}
/**
* Gets the value of the 'clockSplits' field.
* @return The value (an Avro array of ints; element type restored from the
*         "clockSplits" array schema in SCHEMA$).
*/
public java.util.List<java.lang.Integer> getClockSplits() {
return clockSplits;
}
/**
* Sets the value of the 'clockSplits' field.
* @param value The value of 'clockSplits' (schema: array of int).
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setClockSplits(java.util.List<java.lang.Integer> value) {
// Validate against the 'clockSplits' field schema (field index 12) before storing.
validate(fields()[12], value);
this.clockSplits = value;
fieldSetFlags()[12] = true;
return this;
}
/**
* Checks whether the 'clockSplits' field has been set.
* @return True if the 'clockSplits' field has been set, false otherwise.
*/
public boolean hasClockSplits() {
// Set-flag for field index 12 ('clockSplits').
return fieldSetFlags()[12];
}
/**
* Clears the value of the 'clockSplits' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearClockSplits() {
clockSplits = null;
// build() will now substitute the schema default for field 12.
fieldSetFlags()[12] = false;
return this;
}
/**
* Gets the value of the 'cpuUsages' field.
* @return The value (an Avro array of ints; element type restored from the
*         "cpuUsages" array schema in SCHEMA$).
*/
public java.util.List<java.lang.Integer> getCpuUsages() {
return cpuUsages;
}
/**
* Sets the value of the 'cpuUsages' field.
* @param value The value of 'cpuUsages' (schema: array of int).
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setCpuUsages(java.util.List<java.lang.Integer> value) {
// Validate against the 'cpuUsages' field schema (field index 13) before storing.
validate(fields()[13], value);
this.cpuUsages = value;
fieldSetFlags()[13] = true;
return this;
}
/**
* Checks whether the 'cpuUsages' field has been set.
* @return True if the 'cpuUsages' field has been set, false otherwise.
*/
public boolean hasCpuUsages() {
// Set-flag for field index 13 ('cpuUsages').
return fieldSetFlags()[13];
}
/**
* Clears the value of the 'cpuUsages' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearCpuUsages() {
cpuUsages = null;
// build() will now substitute the schema default for field 13.
fieldSetFlags()[13] = false;
return this;
}
/**
* Gets the value of the 'vMemKbytes' field.
* @return The value (an Avro array of ints; element type restored from the
*         "vMemKbytes" array schema in SCHEMA$).
*/
public java.util.List<java.lang.Integer> getVMemKbytes() {
return vMemKbytes;
}
/**
* Sets the value of the 'vMemKbytes' field.
* @param value The value of 'vMemKbytes' (schema: array of int).
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setVMemKbytes(java.util.List<java.lang.Integer> value) {
// Validate against the 'vMemKbytes' field schema (field index 14) before storing.
validate(fields()[14], value);
this.vMemKbytes = value;
fieldSetFlags()[14] = true;
return this;
}
/**
* Checks whether the 'vMemKbytes' field has been set.
* @return True if the 'vMemKbytes' field has been set, false otherwise.
*/
public boolean hasVMemKbytes() {
// Set-flag for field index 14 ('vMemKbytes').
return fieldSetFlags()[14];
}
/**
* Clears the value of the 'vMemKbytes' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearVMemKbytes() {
vMemKbytes = null;
// build() will now substitute the schema default for field 14.
fieldSetFlags()[14] = false;
return this;
}
/**
* Gets the value of the 'physMemKbytes' field.
* @return The value (an Avro array of ints; element type restored from the
*         "physMemKbytes" array schema in SCHEMA$).
*/
public java.util.List<java.lang.Integer> getPhysMemKbytes() {
return physMemKbytes;
}
/**
* Sets the value of the 'physMemKbytes' field.
* @param value The value of 'physMemKbytes' (schema: array of int).
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
// Validate against the 'physMemKbytes' field schema (field index 15) before storing.
validate(fields()[15], value);
this.physMemKbytes = value;
fieldSetFlags()[15] = true;
return this;
}
/**
* Checks whether the 'physMemKbytes' field has been set.
* @return True if the 'physMemKbytes' field has been set, false otherwise.
*/
public boolean hasPhysMemKbytes() {
// Set-flag for field index 15 ('physMemKbytes').
return fieldSetFlags()[15];
}
/**
* Clears the value of the 'physMemKbytes' field.
* @return This builder.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearPhysMemKbytes() {
physMemKbytes = null;
// build() will now substitute the schema default for field 15.
fieldSetFlags()[15] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public ReduceAttemptFinished build() {
try {
ReduceAttemptFinished record = new ReduceAttemptFinished();
// For every field: use the explicitly-set value when its flag is up,
// otherwise fall back to the schema default.
record.taskid = fieldSetFlags()[0] ? this.taskid : (java.lang.CharSequence) defaultValue(fields()[0]);
record.attemptId = fieldSetFlags()[1] ? this.attemptId : (java.lang.CharSequence) defaultValue(fields()[1]);
record.taskType = fieldSetFlags()[2] ? this.taskType : (java.lang.CharSequence) defaultValue(fields()[2]);
record.taskStatus = fieldSetFlags()[3] ? this.taskStatus : (java.lang.CharSequence) defaultValue(fields()[3]);
record.shuffleFinishTime = fieldSetFlags()[4] ? this.shuffleFinishTime : (java.lang.Long) defaultValue(fields()[4]);
record.sortFinishTime = fieldSetFlags()[5] ? this.sortFinishTime : (java.lang.Long) defaultValue(fields()[5]);
record.finishTime = fieldSetFlags()[6] ? this.finishTime : (java.lang.Long) defaultValue(fields()[6]);
record.hostname = fieldSetFlags()[7] ? this.hostname : (java.lang.CharSequence) defaultValue(fields()[7]);
record.port = fieldSetFlags()[8] ? this.port : (java.lang.Integer) defaultValue(fields()[8]);
record.rackname = fieldSetFlags()[9] ? this.rackname : (java.lang.CharSequence) defaultValue(fields()[9]);
record.state = fieldSetFlags()[10] ? this.state : (java.lang.CharSequence) defaultValue(fields()[10]);
// A pending sub-builder takes precedence over a directly-set 'counters' value.
if (countersBuilder != null) {
try {
record.counters = this.countersBuilder.build();
} catch (org.apache.hadoop.shaded.org.apache.avro.AvroMissingFieldException e) {
e.addParentField(record.getSchema().getField("counters"));
throw e;
}
} else {
record.counters = fieldSetFlags()[11] ? this.counters : (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters) defaultValue(fields()[11]);
}
record.clockSplits = fieldSetFlags()[12] ? this.clockSplits : (java.util.List<java.lang.Integer>) defaultValue(fields()[12]);
record.cpuUsages = fieldSetFlags()[13] ? this.cpuUsages : (java.util.List<java.lang.Integer>) defaultValue(fields()[13]);
record.vMemKbytes = fieldSetFlags()[14] ? this.vMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[14]);
record.physMemKbytes = fieldSetFlags()[15] ? this.physMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[15]);
return record;
} catch (org.apache.hadoop.shaded.org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
// Fixed shading inconsistency: wrap in the *shaded* AvroRuntimeException,
// matching every other Avro reference in this file.
throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
// Restored the generic parameter lost in this rendering; Avro codegen emits
// DatumWriter<ReduceAttemptFinished> here.
private static final org.apache.hadoop.shaded.org.apache.avro.io.DatumWriter<ReduceAttemptFinished>
WRITER$ = (org.apache.hadoop.shaded.org.apache.avro.io.DatumWriter<ReduceAttemptFinished>)MODEL$.createDatumWriter(SCHEMA$);
// Serializes this record via Avro's binary encoding instead of default Java
// field-by-field serialization.
@Override public void writeExternal(java.io.ObjectOutput out)
throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
// Restored the generic parameter lost in this rendering; Avro codegen emits
// DatumReader<ReduceAttemptFinished> here.
private static final org.apache.hadoop.shaded.org.apache.avro.io.DatumReader<ReduceAttemptFinished>
READER$ = (org.apache.hadoop.shaded.org.apache.avro.io.DatumReader<ReduceAttemptFinished>)MODEL$.createDatumReader(SCHEMA$);
// Deserializes this record via Avro's binary decoding, mirroring writeExternal.
@Override public void readExternal(java.io.ObjectInput in)
throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
@Override protected boolean hasCustomCoders() { return true; }
// Writes all 16 fields in SCHEMA$ declaration order using Avro's custom-coder
// fast path. Each int-array field is written with a size/element-count
// cross-check to detect concurrent mutation of the backing list.
@Override public void customEncode(org.apache.hadoop.shaded.org.apache.avro.io.Encoder out)
throws java.io.IOException
{
out.writeString(this.taskid);
out.writeString(this.attemptId);
out.writeString(this.taskType);
out.writeString(this.taskStatus);
out.writeLong(this.shuffleFinishTime);
out.writeLong(this.sortFinishTime);
out.writeLong(this.finishTime);
out.writeString(this.hostname);
out.writeInt(this.port);
out.writeString(this.rackname);
out.writeString(this.state);
// 'counters' is a nested record; delegate to its own custom encoder.
this.counters.customEncode(out);
// Field 12: 'clockSplits'.
long size0 = this.clockSplits.size();
out.writeArrayStart();
out.setItemCount(size0);
long actualSize0 = 0;
for (java.lang.Integer e0: this.clockSplits) {
actualSize0++;
out.startItem();
out.writeInt(e0);
}
out.writeArrayEnd();
if (actualSize0 != size0)
throw new java.util.ConcurrentModificationException("Array-size written was " + size0 + ", but element count was " + actualSize0 + ".");
// Field 13: 'cpuUsages'.
long size1 = this.cpuUsages.size();
out.writeArrayStart();
out.setItemCount(size1);
long actualSize1 = 0;
for (java.lang.Integer e1: this.cpuUsages) {
actualSize1++;
out.startItem();
out.writeInt(e1);
}
out.writeArrayEnd();
if (actualSize1 != size1)
throw new java.util.ConcurrentModificationException("Array-size written was " + size1 + ", but element count was " + actualSize1 + ".");
// Field 14: 'vMemKbytes'.
long size2 = this.vMemKbytes.size();
out.writeArrayStart();
out.setItemCount(size2);
long actualSize2 = 0;
for (java.lang.Integer e2: this.vMemKbytes) {
actualSize2++;
out.startItem();
out.writeInt(e2);
}
out.writeArrayEnd();
if (actualSize2 != size2)
throw new java.util.ConcurrentModificationException("Array-size written was " + size2 + ", but element count was " + actualSize2 + ".");
// Field 15: 'physMemKbytes'.
long size3 = this.physMemKbytes.size();
out.writeArrayStart();
out.setItemCount(size3);
long actualSize3 = 0;
for (java.lang.Integer e3: this.physMemKbytes) {
actualSize3++;
out.startItem();
out.writeInt(e3);
}
out.writeArrayEnd();
if (actualSize3 != size3)
throw new java.util.ConcurrentModificationException("Array-size written was " + size3 + ", but element count was " + actualSize3 + ".");
}
// Reads all 16 fields, either in schema order (fast path, fieldOrder == null)
// or in the resolved writer order supplied by the ResolvingDecoder.
// Fixes applied vs. the rendered original:
//  * org.apache.avro.Schema.Field and the two `new ...JhCounters()` calls now
//    use the shaded package names, consistent with the rest of this file
//    (the unshaded JhCounters could not be assigned to the shaded-typed field);
//  * generic parameters on List/SpecificData.Array were restored (the raw
//    `ga0.peek()` could not be assigned to java.lang.Integer).
@Override public void customDecode(org.apache.hadoop.shaded.org.apache.avro.io.ResolvingDecoder in)
throws java.io.IOException
{
org.apache.hadoop.shaded.org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
if (fieldOrder == null) {
this.taskid = in.readString(this.taskid instanceof Utf8 ? (Utf8)this.taskid : null);
this.attemptId = in.readString(this.attemptId instanceof Utf8 ? (Utf8)this.attemptId : null);
this.taskType = in.readString(this.taskType instanceof Utf8 ? (Utf8)this.taskType : null);
this.taskStatus = in.readString(this.taskStatus instanceof Utf8 ? (Utf8)this.taskStatus : null);
this.shuffleFinishTime = in.readLong();
this.sortFinishTime = in.readLong();
this.finishTime = in.readLong();
this.hostname = in.readString(this.hostname instanceof Utf8 ? (Utf8)this.hostname : null);
this.port = in.readInt();
this.rackname = in.readString(this.rackname instanceof Utf8 ? (Utf8)this.rackname : null);
this.state = in.readString(this.state instanceof Utf8 ? (Utf8)this.state : null);
// Reuse the existing nested record when possible to avoid reallocation.
if (this.counters == null) {
this.counters = new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters();
}
this.counters.customDecode(in);
long size0 = in.readArrayStart();
java.util.List<java.lang.Integer> a0 = this.clockSplits;
if (a0 == null) {
a0 = new SpecificData.Array<java.lang.Integer>((int)size0, SCHEMA$.getField("clockSplits").schema());
this.clockSplits = a0;
} else a0.clear();
SpecificData.Array<java.lang.Integer> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a0 : null);
for ( ; 0 < size0; size0 = in.arrayNext()) {
for ( ; size0 != 0; size0--) {
java.lang.Integer e0 = (ga0 != null ? ga0.peek() : null);
e0 = in.readInt();
a0.add(e0);
}
}
long size1 = in.readArrayStart();
java.util.List<java.lang.Integer> a1 = this.cpuUsages;
if (a1 == null) {
a1 = new SpecificData.Array<java.lang.Integer>((int)size1, SCHEMA$.getField("cpuUsages").schema());
this.cpuUsages = a1;
} else a1.clear();
SpecificData.Array<java.lang.Integer> ga1 = (a1 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a1 : null);
for ( ; 0 < size1; size1 = in.arrayNext()) {
for ( ; size1 != 0; size1--) {
java.lang.Integer e1 = (ga1 != null ? ga1.peek() : null);
e1 = in.readInt();
a1.add(e1);
}
}
long size2 = in.readArrayStart();
java.util.List<java.lang.Integer> a2 = this.vMemKbytes;
if (a2 == null) {
a2 = new SpecificData.Array<java.lang.Integer>((int)size2, SCHEMA$.getField("vMemKbytes").schema());
this.vMemKbytes = a2;
} else a2.clear();
SpecificData.Array<java.lang.Integer> ga2 = (a2 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a2 : null);
for ( ; 0 < size2; size2 = in.arrayNext()) {
for ( ; size2 != 0; size2--) {
java.lang.Integer e2 = (ga2 != null ? ga2.peek() : null);
e2 = in.readInt();
a2.add(e2);
}
}
long size3 = in.readArrayStart();
java.util.List<java.lang.Integer> a3 = this.physMemKbytes;
if (a3 == null) {
a3 = new SpecificData.Array<java.lang.Integer>((int)size3, SCHEMA$.getField("physMemKbytes").schema());
this.physMemKbytes = a3;
} else a3.clear();
SpecificData.Array<java.lang.Integer> ga3 = (a3 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a3 : null);
for ( ; 0 < size3; size3 = in.arrayNext()) {
for ( ; size3 != 0; size3--) {
java.lang.Integer e3 = (ga3 != null ? ga3.peek() : null);
e3 = in.readInt();
a3.add(e3);
}
}
} else {
// Writer schema differs: read fields in the resolved order.
for (int i = 0; i < 16; i++) {
switch (fieldOrder[i].pos()) {
case 0:
this.taskid = in.readString(this.taskid instanceof Utf8 ? (Utf8)this.taskid : null);
break;
case 1:
this.attemptId = in.readString(this.attemptId instanceof Utf8 ? (Utf8)this.attemptId : null);
break;
case 2:
this.taskType = in.readString(this.taskType instanceof Utf8 ? (Utf8)this.taskType : null);
break;
case 3:
this.taskStatus = in.readString(this.taskStatus instanceof Utf8 ? (Utf8)this.taskStatus : null);
break;
case 4:
this.shuffleFinishTime = in.readLong();
break;
case 5:
this.sortFinishTime = in.readLong();
break;
case 6:
this.finishTime = in.readLong();
break;
case 7:
this.hostname = in.readString(this.hostname instanceof Utf8 ? (Utf8)this.hostname : null);
break;
case 8:
this.port = in.readInt();
break;
case 9:
this.rackname = in.readString(this.rackname instanceof Utf8 ? (Utf8)this.rackname : null);
break;
case 10:
this.state = in.readString(this.state instanceof Utf8 ? (Utf8)this.state : null);
break;
case 11:
if (this.counters == null) {
this.counters = new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters();
}
this.counters.customDecode(in);
break;
case 12:
long size0 = in.readArrayStart();
java.util.List<java.lang.Integer> a0 = this.clockSplits;
if (a0 == null) {
a0 = new SpecificData.Array<java.lang.Integer>((int)size0, SCHEMA$.getField("clockSplits").schema());
this.clockSplits = a0;
} else a0.clear();
SpecificData.Array<java.lang.Integer> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a0 : null);
for ( ; 0 < size0; size0 = in.arrayNext()) {
for ( ; size0 != 0; size0--) {
java.lang.Integer e0 = (ga0 != null ? ga0.peek() : null);
e0 = in.readInt();
a0.add(e0);
}
}
break;
case 13:
long size1 = in.readArrayStart();
java.util.List<java.lang.Integer> a1 = this.cpuUsages;
if (a1 == null) {
a1 = new SpecificData.Array<java.lang.Integer>((int)size1, SCHEMA$.getField("cpuUsages").schema());
this.cpuUsages = a1;
} else a1.clear();
SpecificData.Array<java.lang.Integer> ga1 = (a1 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a1 : null);
for ( ; 0 < size1; size1 = in.arrayNext()) {
for ( ; size1 != 0; size1--) {
java.lang.Integer e1 = (ga1 != null ? ga1.peek() : null);
e1 = in.readInt();
a1.add(e1);
}
}
break;
case 14:
long size2 = in.readArrayStart();
java.util.List<java.lang.Integer> a2 = this.vMemKbytes;
if (a2 == null) {
a2 = new SpecificData.Array<java.lang.Integer>((int)size2, SCHEMA$.getField("vMemKbytes").schema());
this.vMemKbytes = a2;
} else a2.clear();
SpecificData.Array<java.lang.Integer> ga2 = (a2 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a2 : null);
for ( ; 0 < size2; size2 = in.arrayNext()) {
for ( ; size2 != 0; size2--) {
java.lang.Integer e2 = (ga2 != null ? ga2.peek() : null);
e2 = in.readInt();
a2.add(e2);
}
}
break;
case 15:
long size3 = in.readArrayStart();
java.util.List<java.lang.Integer> a3 = this.physMemKbytes;
if (a3 == null) {
a3 = new SpecificData.Array<java.lang.Integer>((int)size3, SCHEMA$.getField("physMemKbytes").schema());
this.physMemKbytes = a3;
} else a3.clear();
SpecificData.Array<java.lang.Integer> ga3 = (a3 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.Integer>)a3 : null);
for ( ; 0 < size3; size3 = in.arrayNext()) {
for ( ; size3 != 0; size3--) {
java.lang.Integer e3 = (ga3 != null ? ga3.peek() : null);
e3 = in.readInt();
a3.add(e3);
}
}
break;
default:
throw new java.io.IOException("Corrupt ResolvingDecoder.");
}
}
}
}
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy (site footer captured with the source; kept as a comment so the file remains valid Java)