/**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory;
@SuppressWarnings("all")
@org.apache.hadoop.shaded.org.apache.avro.specific.AvroGenerated
public class TaskAttemptUnsuccessfulCompletion extends org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecordBase implements org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecord {
public static final org.apache.hadoop.shaded.org.apache.avro.Schema SCHEMA$ = new org.apache.hadoop.shaded.org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TaskAttemptUnsuccessfulCompletion\",\"namespace\":\"org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory\",\"fields\":[{\"name\":\"taskid\",\"type\":\"string\"},{\"name\":\"taskType\",\"type\":\"string\"},{\"name\":\"attemptId\",\"type\":\"string\"},{\"name\":\"finishTime\",\"type\":\"long\"},{\"name\":\"hostname\",\"type\":\"string\"},{\"name\":\"port\",\"type\":\"int\"},{\"name\":\"rackname\",\"type\":\"string\"},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"error\",\"type\":\"string\"},{\"name\":\"counters\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"JhCounters\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"groups\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounterGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"counts\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounter\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"long\"}]}}}]}}}]}],\"default\":null},{\"name\":\"clockSplits\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"cpuUsages\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"vMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"physMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}}]}");
public static org.apache.hadoop.shaded.org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
@Deprecated public java.lang.CharSequence taskid;
@Deprecated public java.lang.CharSequence taskType;
@Deprecated public java.lang.CharSequence attemptId;
@Deprecated public long finishTime;
@Deprecated public java.lang.CharSequence hostname;
@Deprecated public int port;
@Deprecated public java.lang.CharSequence rackname;
@Deprecated public java.lang.CharSequence status;
@Deprecated public java.lang.CharSequence error;
@Deprecated public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
@Deprecated public java.util.List<java.lang.Integer> clockSplits;
@Deprecated public java.util.List<java.lang.Integer> cpuUsages;
@Deprecated public java.util.List<java.lang.Integer> vMemKbytes;
@Deprecated public java.util.List<java.lang.Integer> physMemKbytes;
/**
 * Default constructor. Note that this does not initialize fields
 * to their default values from the schema. If that is desired then
 * one should use newBuilder().
 */
public TaskAttemptUnsuccessfulCompletion() {}
/**
* All-args constructor.
*/
public TaskAttemptUnsuccessfulCompletion(java.lang.CharSequence taskid, java.lang.CharSequence taskType, java.lang.CharSequence attemptId, java.lang.Long finishTime, java.lang.CharSequence hostname, java.lang.Integer port, java.lang.CharSequence rackname, java.lang.CharSequence status, java.lang.CharSequence error, org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters, java.util.List<java.lang.Integer> clockSplits, java.util.List<java.lang.Integer> cpuUsages, java.util.List<java.lang.Integer> vMemKbytes, java.util.List<java.lang.Integer> physMemKbytes) {
this.taskid = taskid;
this.taskType = taskType;
this.attemptId = attemptId;
this.finishTime = finishTime;
this.hostname = hostname;
this.port = port;
this.rackname = rackname;
this.status = status;
this.error = error;
this.counters = counters;
this.clockSplits = clockSplits;
this.cpuUsages = cpuUsages;
this.vMemKbytes = vMemKbytes;
this.physMemKbytes = physMemKbytes;
}
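// Illustrative usage (not part of the generated source): a minimal sketch of
// constructing the record directly through the all-args constructor, supplying
// every field in schema order. All literal values below are made-up placeholders.
//
//   TaskAttemptUnsuccessfulCompletion event = new TaskAttemptUnsuccessfulCompletion(
//       "task_1_0001_m_000000",            // taskid
//       "MAP",                             // taskType
//       "attempt_1_0001_m_000000_0",       // attemptId
//       1700000000000L,                    // finishTime (epoch millis)
//       "worker-01.example.com",           // hostname
//       13562,                             // port
//       "/default-rack",                   // rackname
//       "FAILED",                          // status
//       "Task timed out",                  // error
//       null,                              // counters (nullable in the schema)
//       java.util.Arrays.asList(0, 0, 0),  // clockSplits
//       java.util.Arrays.asList(0, 0, 0),  // cpuUsages
//       java.util.Arrays.asList(0, 0, 0),  // vMemKbytes
//       java.util.Arrays.asList(0, 0, 0)); // physMemKbytes
//
// The builder shown further below is usually preferable, since it validates
// values against the schema and applies schema defaults for unset fields.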
public org.apache.hadoop.shaded.org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return taskid;
case 1: return taskType;
case 2: return attemptId;
case 3: return finishTime;
case 4: return hostname;
case 5: return port;
case 6: return rackname;
case 7: return status;
case 8: return error;
case 9: return counters;
case 10: return clockSplits;
case 11: return cpuUsages;
case 12: return vMemKbytes;
case 13: return physMemKbytes;
default: throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: taskid = (java.lang.CharSequence)value$; break;
case 1: taskType = (java.lang.CharSequence)value$; break;
case 2: attemptId = (java.lang.CharSequence)value$; break;
case 3: finishTime = (java.lang.Long)value$; break;
case 4: hostname = (java.lang.CharSequence)value$; break;
case 5: port = (java.lang.Integer)value$; break;
case 6: rackname = (java.lang.CharSequence)value$; break;
case 7: status = (java.lang.CharSequence)value$; break;
case 8: error = (java.lang.CharSequence)value$; break;
case 9: counters = (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters)value$; break;
case 10: clockSplits = (java.util.List<java.lang.Integer>)value$; break;
case 11: cpuUsages = (java.util.List<java.lang.Integer>)value$; break;
case 12: vMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
case 13: physMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
default: throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException("Bad index");
}
}
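// Illustrative usage (not part of the generated source): get() and put() are
// invoked indirectly when Avro serializes or deserializes the record. A rough
// sketch of round-tripping one event through the binary encoding, assuming the
// shaded classes mirror the standard org.apache.avro API and that 'event' is an
// instance such as the one constructed above (the write/read calls throw
// java.io.IOException):
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   org.apache.hadoop.shaded.org.apache.avro.io.BinaryEncoder encoder =
//       org.apache.hadoop.shaded.org.apache.avro.io.EncoderFactory.get().binaryEncoder(out, null);
//   new org.apache.hadoop.shaded.org.apache.avro.specific.SpecificDatumWriter<TaskAttemptUnsuccessfulCompletion>(
//       TaskAttemptUnsuccessfulCompletion.class).write(event, encoder);
//   encoder.flush();
//
//   org.apache.hadoop.shaded.org.apache.avro.io.BinaryDecoder decoder =
//       org.apache.hadoop.shaded.org.apache.avro.io.DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
//   TaskAttemptUnsuccessfulCompletion copy =
//       new org.apache.hadoop.shaded.org.apache.avro.specific.SpecificDatumReader<TaskAttemptUnsuccessfulCompletion>(
//           TaskAttemptUnsuccessfulCompletion.class).read(null, decoder);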
/**
* Gets the value of the 'taskid' field.
*/
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
* Sets the value of the 'taskid' field.
* @param value the value to set.
*/
public void setTaskid(java.lang.CharSequence value) {
this.taskid = value;
}
/**
* Gets the value of the 'taskType' field.
*/
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
* Sets the value of the 'taskType' field.
* @param value the value to set.
*/
public void setTaskType(java.lang.CharSequence value) {
this.taskType = value;
}
/**
* Gets the value of the 'attemptId' field.
*/
public java.lang.CharSequence getAttemptId() {
return attemptId;
}
/**
* Sets the value of the 'attemptId' field.
* @param value the value to set.
*/
public void setAttemptId(java.lang.CharSequence value) {
this.attemptId = value;
}
/**
* Gets the value of the 'finishTime' field.
*/
public java.lang.Long getFinishTime() {
return finishTime;
}
/**
* Sets the value of the 'finishTime' field.
* @param value the value to set.
*/
public void setFinishTime(java.lang.Long value) {
this.finishTime = value;
}
/**
* Gets the value of the 'hostname' field.
*/
public java.lang.CharSequence getHostname() {
return hostname;
}
/**
* Sets the value of the 'hostname' field.
* @param value the value to set.
*/
public void setHostname(java.lang.CharSequence value) {
this.hostname = value;
}
/**
* Gets the value of the 'port' field.
*/
public java.lang.Integer getPort() {
return port;
}
/**
* Sets the value of the 'port' field.
* @param value the value to set.
*/
public void setPort(java.lang.Integer value) {
this.port = value;
}
/**
* Gets the value of the 'rackname' field.
*/
public java.lang.CharSequence getRackname() {
return rackname;
}
/**
* Sets the value of the 'rackname' field.
* @param value the value to set.
*/
public void setRackname(java.lang.CharSequence value) {
this.rackname = value;
}
/**
* Gets the value of the 'status' field.
*/
public java.lang.CharSequence getStatus() {
return status;
}
/**
* Sets the value of the 'status' field.
* @param value the value to set.
*/
public void setStatus(java.lang.CharSequence value) {
this.status = value;
}
/**
* Gets the value of the 'error' field.
*/
public java.lang.CharSequence getError() {
return error;
}
/**
* Sets the value of the 'error' field.
* @param value the value to set.
*/
public void setError(java.lang.CharSequence value) {
this.error = value;
}
/**
* Gets the value of the 'counters' field.
*/
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/**
* Sets the value of the 'counters' field.
* @param value the value to set.
*/
public void setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
this.counters = value;
}
/**
* Gets the value of the 'clockSplits' field.
*/
public java.util.List<java.lang.Integer> getClockSplits() {
return clockSplits;
}
/**
* Sets the value of the 'clockSplits' field.
* @param value the value to set.
*/
public void setClockSplits(java.util.List<java.lang.Integer> value) {
this.clockSplits = value;
}
/**
* Gets the value of the 'cpuUsages' field.
*/
public java.util.List<java.lang.Integer> getCpuUsages() {
return cpuUsages;
}
/**
* Sets the value of the 'cpuUsages' field.
* @param value the value to set.
*/
public void setCpuUsages(java.util.List<java.lang.Integer> value) {
this.cpuUsages = value;
}
/**
* Gets the value of the 'vMemKbytes' field.
*/
public java.util.List<java.lang.Integer> getVMemKbytes() {
return vMemKbytes;
}
/**
* Sets the value of the 'vMemKbytes' field.
* @param value the value to set.
*/
public void setVMemKbytes(java.util.List<java.lang.Integer> value) {
this.vMemKbytes = value;
}
/**
* Gets the value of the 'physMemKbytes' field.
*/
public java.util.List<java.lang.Integer> getPhysMemKbytes() {
return physMemKbytes;
}
/**
* Sets the value of the 'physMemKbytes' field.
* @param value the value to set.
*/
public void setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
this.physMemKbytes = value;
}
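// Illustrative note (not part of the generated source): the string-typed getters
// return java.lang.CharSequence, which after Avro deserialization is typically an
// org.apache.avro.util.Utf8 rather than a java.lang.String, so convert before
// comparing or storing as a String, e.g.:
//
//   String host = event.getHostname() == null ? null : event.getHostname().toString();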
/** Creates a new TaskAttemptUnsuccessfulCompletion RecordBuilder */
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder newBuilder() {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder();
}
/** Creates a new TaskAttemptUnsuccessfulCompletion RecordBuilder by copying an existing Builder */
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder other) {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder(other);
}
/** Creates a new TaskAttemptUnsuccessfulCompletion RecordBuilder by copying an existing TaskAttemptUnsuccessfulCompletion instance */
public static org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion other) {
return new org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder(other);
}
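// Illustrative usage (not part of the generated source): a sketch of building an
// event through the builder, which validates each value against the schema and
// falls back to schema defaults for unset fields. Only 'counters' has a default
// (null) in this schema, so the remaining fields must be set before build().
// All literal values below are made-up placeholders.
//
//   TaskAttemptUnsuccessfulCompletion event = TaskAttemptUnsuccessfulCompletion.newBuilder()
//       .setTaskid("task_1_0001_m_000000")
//       .setTaskType("MAP")
//       .setAttemptId("attempt_1_0001_m_000000_0")
//       .setFinishTime(1700000000000L)
//       .setHostname("worker-01.example.com")
//       .setPort(13562)
//       .setRackname("/default-rack")
//       .setStatus("FAILED")
//       .setError("Task timed out")
//       .setClockSplits(java.util.Arrays.asList(0, 0, 0))
//       .setCpuUsages(java.util.Arrays.asList(0, 0, 0))
//       .setVMemKbytes(java.util.Arrays.asList(0, 0, 0))
//       .setPhysMemKbytes(java.util.Arrays.asList(0, 0, 0))
//       .build();   // 'counters' is left unset and defaults to null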
/**
* RecordBuilder for TaskAttemptUnsuccessfulCompletion instances.
*/
public static class Builder extends org.apache.hadoop.shaded.org.apache.avro.specific.SpecificRecordBuilderBase<TaskAttemptUnsuccessfulCompletion>
implements org.apache.hadoop.shaded.org.apache.avro.data.RecordBuilder<TaskAttemptUnsuccessfulCompletion> {
private java.lang.CharSequence taskid;
private java.lang.CharSequence taskType;
private java.lang.CharSequence attemptId;
private long finishTime;
private java.lang.CharSequence hostname;
private int port;
private java.lang.CharSequence rackname;
private java.lang.CharSequence status;
private java.lang.CharSequence error;
private org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
private java.util.List<java.lang.Integer> clockSplits;
private java.util.List<java.lang.Integer> cpuUsages;
private java.util.List<java.lang.Integer> vMemKbytes;
private java.util.List<java.lang.Integer> physMemKbytes;
/** Creates a new Builder */
private Builder() {
super(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.SCHEMA$);
}
/** Creates a Builder by copying an existing Builder */
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder other) {
super(other);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.attemptId)) {
this.attemptId = data().deepCopy(fields()[2].schema(), other.attemptId);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[3].schema(), other.finishTime);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.hostname)) {
this.hostname = data().deepCopy(fields()[4].schema(), other.hostname);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.port)) {
this.port = data().deepCopy(fields()[5].schema(), other.port);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.rackname)) {
this.rackname = data().deepCopy(fields()[6].schema(), other.rackname);
fieldSetFlags()[6] = true;
}
if (isValidValue(fields()[7], other.status)) {
this.status = data().deepCopy(fields()[7].schema(), other.status);
fieldSetFlags()[7] = true;
}
if (isValidValue(fields()[8], other.error)) {
this.error = data().deepCopy(fields()[8].schema(), other.error);
fieldSetFlags()[8] = true;
}
if (isValidValue(fields()[9], other.counters)) {
this.counters = data().deepCopy(fields()[9].schema(), other.counters);
fieldSetFlags()[9] = true;
}
if (isValidValue(fields()[10], other.clockSplits)) {
this.clockSplits = data().deepCopy(fields()[10].schema(), other.clockSplits);
fieldSetFlags()[10] = true;
}
if (isValidValue(fields()[11], other.cpuUsages)) {
this.cpuUsages = data().deepCopy(fields()[11].schema(), other.cpuUsages);
fieldSetFlags()[11] = true;
}
if (isValidValue(fields()[12], other.vMemKbytes)) {
this.vMemKbytes = data().deepCopy(fields()[12].schema(), other.vMemKbytes);
fieldSetFlags()[12] = true;
}
if (isValidValue(fields()[13], other.physMemKbytes)) {
this.physMemKbytes = data().deepCopy(fields()[13].schema(), other.physMemKbytes);
fieldSetFlags()[13] = true;
}
}
/** Creates a Builder by copying an existing TaskAttemptUnsuccessfulCompletion instance */
private Builder(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion other) {
super(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.SCHEMA$);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.attemptId)) {
this.attemptId = data().deepCopy(fields()[2].schema(), other.attemptId);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[3].schema(), other.finishTime);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.hostname)) {
this.hostname = data().deepCopy(fields()[4].schema(), other.hostname);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.port)) {
this.port = data().deepCopy(fields()[5].schema(), other.port);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.rackname)) {
this.rackname = data().deepCopy(fields()[6].schema(), other.rackname);
fieldSetFlags()[6] = true;
}
if (isValidValue(fields()[7], other.status)) {
this.status = data().deepCopy(fields()[7].schema(), other.status);
fieldSetFlags()[7] = true;
}
if (isValidValue(fields()[8], other.error)) {
this.error = data().deepCopy(fields()[8].schema(), other.error);
fieldSetFlags()[8] = true;
}
if (isValidValue(fields()[9], other.counters)) {
this.counters = data().deepCopy(fields()[9].schema(), other.counters);
fieldSetFlags()[9] = true;
}
if (isValidValue(fields()[10], other.clockSplits)) {
this.clockSplits = data().deepCopy(fields()[10].schema(), other.clockSplits);
fieldSetFlags()[10] = true;
}
if (isValidValue(fields()[11], other.cpuUsages)) {
this.cpuUsages = data().deepCopy(fields()[11].schema(), other.cpuUsages);
fieldSetFlags()[11] = true;
}
if (isValidValue(fields()[12], other.vMemKbytes)) {
this.vMemKbytes = data().deepCopy(fields()[12].schema(), other.vMemKbytes);
fieldSetFlags()[12] = true;
}
if (isValidValue(fields()[13], other.physMemKbytes)) {
this.physMemKbytes = data().deepCopy(fields()[13].schema(), other.physMemKbytes);
fieldSetFlags()[13] = true;
}
}
/** Gets the value of the 'taskid' field */
public java.lang.CharSequence getTaskid() {
return taskid;
}
/** Sets the value of the 'taskid' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setTaskid(java.lang.CharSequence value) {
validate(fields()[0], value);
this.taskid = value;
fieldSetFlags()[0] = true;
return this;
}
/** Checks whether the 'taskid' field has been set */
public boolean hasTaskid() {
return fieldSetFlags()[0];
}
/** Clears the value of the 'taskid' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearTaskid() {
taskid = null;
fieldSetFlags()[0] = false;
return this;
}
/** Gets the value of the 'taskType' field */
public java.lang.CharSequence getTaskType() {
return taskType;
}
/** Sets the value of the 'taskType' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setTaskType(java.lang.CharSequence value) {
validate(fields()[1], value);
this.taskType = value;
fieldSetFlags()[1] = true;
return this;
}
/** Checks whether the 'taskType' field has been set */
public boolean hasTaskType() {
return fieldSetFlags()[1];
}
/** Clears the value of the 'taskType' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearTaskType() {
taskType = null;
fieldSetFlags()[1] = false;
return this;
}
/** Gets the value of the 'attemptId' field */
public java.lang.CharSequence getAttemptId() {
return attemptId;
}
/** Sets the value of the 'attemptId' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setAttemptId(java.lang.CharSequence value) {
validate(fields()[2], value);
this.attemptId = value;
fieldSetFlags()[2] = true;
return this;
}
/** Checks whether the 'attemptId' field has been set */
public boolean hasAttemptId() {
return fieldSetFlags()[2];
}
/** Clears the value of the 'attemptId' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearAttemptId() {
attemptId = null;
fieldSetFlags()[2] = false;
return this;
}
/** Gets the value of the 'finishTime' field */
public java.lang.Long getFinishTime() {
return finishTime;
}
/** Sets the value of the 'finishTime' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setFinishTime(long value) {
validate(fields()[3], value);
this.finishTime = value;
fieldSetFlags()[3] = true;
return this;
}
/** Checks whether the 'finishTime' field has been set */
public boolean hasFinishTime() {
return fieldSetFlags()[3];
}
/** Clears the value of the 'finishTime' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearFinishTime() {
fieldSetFlags()[3] = false;
return this;
}
/** Gets the value of the 'hostname' field */
public java.lang.CharSequence getHostname() {
return hostname;
}
/** Sets the value of the 'hostname' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setHostname(java.lang.CharSequence value) {
validate(fields()[4], value);
this.hostname = value;
fieldSetFlags()[4] = true;
return this;
}
/** Checks whether the 'hostname' field has been set */
public boolean hasHostname() {
return fieldSetFlags()[4];
}
/** Clears the value of the 'hostname' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearHostname() {
hostname = null;
fieldSetFlags()[4] = false;
return this;
}
/** Gets the value of the 'port' field */
public java.lang.Integer getPort() {
return port;
}
/** Sets the value of the 'port' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setPort(int value) {
validate(fields()[5], value);
this.port = value;
fieldSetFlags()[5] = true;
return this;
}
/** Checks whether the 'port' field has been set */
public boolean hasPort() {
return fieldSetFlags()[5];
}
/** Clears the value of the 'port' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearPort() {
fieldSetFlags()[5] = false;
return this;
}
/** Gets the value of the 'rackname' field */
public java.lang.CharSequence getRackname() {
return rackname;
}
/** Sets the value of the 'rackname' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setRackname(java.lang.CharSequence value) {
validate(fields()[6], value);
this.rackname = value;
fieldSetFlags()[6] = true;
return this;
}
/** Checks whether the 'rackname' field has been set */
public boolean hasRackname() {
return fieldSetFlags()[6];
}
/** Clears the value of the 'rackname' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearRackname() {
rackname = null;
fieldSetFlags()[6] = false;
return this;
}
/** Gets the value of the 'status' field */
public java.lang.CharSequence getStatus() {
return status;
}
/** Sets the value of the 'status' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setStatus(java.lang.CharSequence value) {
validate(fields()[7], value);
this.status = value;
fieldSetFlags()[7] = true;
return this;
}
/** Checks whether the 'status' field has been set */
public boolean hasStatus() {
return fieldSetFlags()[7];
}
/** Clears the value of the 'status' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearStatus() {
status = null;
fieldSetFlags()[7] = false;
return this;
}
/** Gets the value of the 'error' field */
public java.lang.CharSequence getError() {
return error;
}
/** Sets the value of the 'error' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setError(java.lang.CharSequence value) {
validate(fields()[8], value);
this.error = value;
fieldSetFlags()[8] = true;
return this;
}
/** Checks whether the 'error' field has been set */
public boolean hasError() {
return fieldSetFlags()[8];
}
/** Clears the value of the 'error' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearError() {
error = null;
fieldSetFlags()[8] = false;
return this;
}
/** Gets the value of the 'counters' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/** Sets the value of the 'counters' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setCounters(org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
validate(fields()[9], value);
this.counters = value;
fieldSetFlags()[9] = true;
return this;
}
/** Checks whether the 'counters' field has been set */
public boolean hasCounters() {
return fieldSetFlags()[9];
}
/** Clears the value of the 'counters' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearCounters() {
counters = null;
fieldSetFlags()[9] = false;
return this;
}
/** Gets the value of the 'clockSplits' field */
public java.util.List<java.lang.Integer> getClockSplits() {
return clockSplits;
}
/** Sets the value of the 'clockSplits' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setClockSplits(java.util.List<java.lang.Integer> value) {
validate(fields()[10], value);
this.clockSplits = value;
fieldSetFlags()[10] = true;
return this;
}
/** Checks whether the 'clockSplits' field has been set */
public boolean hasClockSplits() {
return fieldSetFlags()[10];
}
/** Clears the value of the 'clockSplits' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearClockSplits() {
clockSplits = null;
fieldSetFlags()[10] = false;
return this;
}
/** Gets the value of the 'cpuUsages' field */
public java.util.List<java.lang.Integer> getCpuUsages() {
return cpuUsages;
}
/** Sets the value of the 'cpuUsages' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setCpuUsages(java.util.List<java.lang.Integer> value) {
validate(fields()[11], value);
this.cpuUsages = value;
fieldSetFlags()[11] = true;
return this;
}
/** Checks whether the 'cpuUsages' field has been set */
public boolean hasCpuUsages() {
return fieldSetFlags()[11];
}
/** Clears the value of the 'cpuUsages' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearCpuUsages() {
cpuUsages = null;
fieldSetFlags()[11] = false;
return this;
}
/** Gets the value of the 'vMemKbytes' field */
public java.util.List<java.lang.Integer> getVMemKbytes() {
return vMemKbytes;
}
/** Sets the value of the 'vMemKbytes' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setVMemKbytes(java.util.List<java.lang.Integer> value) {
validate(fields()[12], value);
this.vMemKbytes = value;
fieldSetFlags()[12] = true;
return this;
}
/** Checks whether the 'vMemKbytes' field has been set */
public boolean hasVMemKbytes() {
return fieldSetFlags()[12];
}
/** Clears the value of the 'vMemKbytes' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearVMemKbytes() {
vMemKbytes = null;
fieldSetFlags()[12] = false;
return this;
}
/** Gets the value of the 'physMemKbytes' field */
public java.util.List<java.lang.Integer> getPhysMemKbytes() {
return physMemKbytes;
}
/** Sets the value of the 'physMemKbytes' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
validate(fields()[13], value);
this.physMemKbytes = value;
fieldSetFlags()[13] = true;
return this;
}
/** Checks whether the 'physMemKbytes' field has been set */
public boolean hasPhysMemKbytes() {
return fieldSetFlags()[13];
}
/** Clears the value of the 'physMemKbytes' field */
public org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion.Builder clearPhysMemKbytes() {
physMemKbytes = null;
fieldSetFlags()[13] = false;
return this;
}
@Override
public TaskAttemptUnsuccessfulCompletion build() {
try {
TaskAttemptUnsuccessfulCompletion record = new TaskAttemptUnsuccessfulCompletion();
record.taskid = fieldSetFlags()[0] ? this.taskid : (java.lang.CharSequence) defaultValue(fields()[0]);
record.taskType = fieldSetFlags()[1] ? this.taskType : (java.lang.CharSequence) defaultValue(fields()[1]);
record.attemptId = fieldSetFlags()[2] ? this.attemptId : (java.lang.CharSequence) defaultValue(fields()[2]);
record.finishTime = fieldSetFlags()[3] ? this.finishTime : (java.lang.Long) defaultValue(fields()[3]);
record.hostname = fieldSetFlags()[4] ? this.hostname : (java.lang.CharSequence) defaultValue(fields()[4]);
record.port = fieldSetFlags()[5] ? this.port : (java.lang.Integer) defaultValue(fields()[5]);
record.rackname = fieldSetFlags()[6] ? this.rackname : (java.lang.CharSequence) defaultValue(fields()[6]);
record.status = fieldSetFlags()[7] ? this.status : (java.lang.CharSequence) defaultValue(fields()[7]);
record.error = fieldSetFlags()[8] ? this.error : (java.lang.CharSequence) defaultValue(fields()[8]);
record.counters = fieldSetFlags()[9] ? this.counters : (org.apache.hadoop.shaded.org.apache.hadoop.mapreduce.jobhistory.JhCounters) defaultValue(fields()[9]);
record.clockSplits = fieldSetFlags()[10] ? this.clockSplits : (java.util.List<java.lang.Integer>) defaultValue(fields()[10]);
record.cpuUsages = fieldSetFlags()[11] ? this.cpuUsages : (java.util.List<java.lang.Integer>) defaultValue(fields()[11]);
record.vMemKbytes = fieldSetFlags()[12] ? this.vMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[12]);
record.physMemKbytes = fieldSetFlags()[13] ? this.physMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[13]);
return record;
} catch (Exception e) {
throw new org.apache.hadoop.shaded.org.apache.avro.AvroRuntimeException(e);
}
}
}
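// Illustrative note (not part of the generated source): build() uses each value
// set on the builder and otherwise asks Avro for the field's schema default, so
// clearing a field that has a default (only 'counters' here, default null) is
// safe, while clearing a field without one is expected to make build() fail with
// an AvroRuntimeException. Assuming 'event' is a fully populated record:
//
//   TaskAttemptUnsuccessfulCompletion copy = TaskAttemptUnsuccessfulCompletion.newBuilder(event)
//       .clearCounters()   // falls back to the schema default, null
//       .build();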
}