org.apache.hadoop.mapreduce.jobhistory.TaskFinished Maven / Gradle / Ivy
Shaded version of Apache Hadoop for Presto
/**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.apache.hadoop.mapreduce.jobhistory;
@SuppressWarnings("all")
@io.prestosql.hadoop.$internal.org.apache.avro.specific.AvroGenerated
public class TaskFinished extends io.prestosql.hadoop.$internal.org.apache.avro.specific.SpecificRecordBase implements io.prestosql.hadoop.$internal.org.apache.avro.specific.SpecificRecord {
public static final io.prestosql.hadoop.$internal.org.apache.avro.Schema SCHEMA$ = new io.prestosql.hadoop.$internal.org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TaskFinished\",\"namespace\":\"org.apache.hadoop.mapreduce.jobhistory\",\"fields\":[{\"name\":\"taskid\",\"type\":\"string\"},{\"name\":\"taskType\",\"type\":\"string\"},{\"name\":\"finishTime\",\"type\":\"long\"},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"counters\",\"type\":{\"type\":\"record\",\"name\":\"JhCounters\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"groups\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounterGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"counts\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounter\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"long\"}]}}}]}}}]}},{\"name\":\"successfulAttemptId\",\"type\":[\"null\",\"string\"],\"default\":null}]}");
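// For readability, the escaped SCHEMA$ string above corresponds to the record
// summarized below (reference comment only; the parsed SCHEMA$ constant remains
// the authoritative schema):
//
//   TaskFinished (namespace org.apache.hadoop.mapreduce.jobhistory)
//     taskid              : string
//     taskType            : string
//     finishTime          : long
//     status              : string
//     counters            : JhCounters { name: string,
//                             groups: array<JhCounterGroup { name: string, displayName: string,
//                               counts: array<JhCounter { name: string, displayName: string, value: long }> }> }
//     successfulAttemptId : union { null, string } (default: null)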
public static io.prestosql.hadoop.$internal.org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
@Deprecated public java.lang.CharSequence taskid;
@Deprecated public java.lang.CharSequence taskType;
@Deprecated public long finishTime;
@Deprecated public java.lang.CharSequence status;
@Deprecated public org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
@Deprecated public java.lang.CharSequence successfulAttemptId;
/**
* Default constructor. Note that this does not initialize fields
* to their default values from the schema. If that is desired then
* one should use newBuilder().
*/
public TaskFinished() {}
/**
* All-args constructor.
*/
public TaskFinished(java.lang.CharSequence taskid, java.lang.CharSequence taskType, java.lang.Long finishTime, java.lang.CharSequence status, org.apache.hadoop.mapreduce.jobhistory.JhCounters counters, java.lang.CharSequence successfulAttemptId) {
this.taskid = taskid;
this.taskType = taskType;
this.finishTime = finishTime;
this.status = status;
this.counters = counters;
this.successfulAttemptId = successfulAttemptId;
}
public io.prestosql.hadoop.$internal.org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return taskid;
case 1: return taskType;
case 2: return finishTime;
case 3: return status;
case 4: return counters;
case 5: return successfulAttemptId;
default: throw new io.prestosql.hadoop.$internal.org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: taskid = (java.lang.CharSequence)value$; break;
case 1: taskType = (java.lang.CharSequence)value$; break;
case 2: finishTime = (java.lang.Long)value$; break;
case 3: status = (java.lang.CharSequence)value$; break;
case 4: counters = (org.apache.hadoop.mapreduce.jobhistory.JhCounters)value$; break;
case 5: successfulAttemptId = (java.lang.CharSequence)value$; break;
default: throw new io.prestosql.hadoop.$internal.org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'taskid' field.
*/
public java.lang.CharSequence getTaskid() {
return taskid;
}
/**
* Sets the value of the 'taskid' field.
* @param value the value to set.
*/
public void setTaskid(java.lang.CharSequence value) {
this.taskid = value;
}
/**
* Gets the value of the 'taskType' field.
*/
public java.lang.CharSequence getTaskType() {
return taskType;
}
/**
* Sets the value of the 'taskType' field.
* @param value the value to set.
*/
public void setTaskType(java.lang.CharSequence value) {
this.taskType = value;
}
/**
* Gets the value of the 'finishTime' field.
*/
public java.lang.Long getFinishTime() {
return finishTime;
}
/**
* Sets the value of the 'finishTime' field.
* @param value the value to set.
*/
public void setFinishTime(java.lang.Long value) {
this.finishTime = value;
}
/**
* Gets the value of the 'status' field.
*/
public java.lang.CharSequence getStatus() {
return status;
}
/**
* Sets the value of the 'status' field.
* @param value the value to set.
*/
public void setStatus(java.lang.CharSequence value) {
this.status = value;
}
/**
* Gets the value of the 'counters' field.
*/
public org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/**
* Sets the value of the 'counters' field.
* @param value the value to set.
*/
public void setCounters(org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
this.counters = value;
}
/**
* Gets the value of the 'successfulAttemptId' field.
*/
public java.lang.CharSequence getSuccessfulAttemptId() {
return successfulAttemptId;
}
/**
* Sets the value of the 'successfulAttemptId' field.
* @param value the value to set.
*/
public void setSuccessfulAttemptId(java.lang.CharSequence value) {
this.successfulAttemptId = value;
}
/** Creates a new TaskFinished RecordBuilder */
public static org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder newBuilder() {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder();
}
/** Creates a new TaskFinished RecordBuilder by copying an existing Builder */
public static org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder newBuilder(org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder other) {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder(other);
}
/** Creates a new TaskFinished RecordBuilder by copying an existing TaskFinished instance */
public static org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder newBuilder(org.apache.hadoop.mapreduce.jobhistory.TaskFinished other) {
return new org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder(other);
}
/**
* RecordBuilder for TaskFinished instances.
*/
public static class Builder extends io.prestosql.hadoop.$internal.org.apache.avro.specific.SpecificRecordBuilderBase<TaskFinished>
implements io.prestosql.hadoop.$internal.org.apache.avro.data.RecordBuilder<TaskFinished> {
private java.lang.CharSequence taskid;
private java.lang.CharSequence taskType;
private long finishTime;
private java.lang.CharSequence status;
private org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
private java.lang.CharSequence successfulAttemptId;
/** Creates a new Builder */
private Builder() {
super(org.apache.hadoop.mapreduce.jobhistory.TaskFinished.SCHEMA$);
}
/** Creates a Builder by copying an existing Builder */
private Builder(org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder other) {
super(other);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[2].schema(), other.finishTime);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.status)) {
this.status = data().deepCopy(fields()[3].schema(), other.status);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.counters)) {
this.counters = data().deepCopy(fields()[4].schema(), other.counters);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.successfulAttemptId)) {
this.successfulAttemptId = data().deepCopy(fields()[5].schema(), other.successfulAttemptId);
fieldSetFlags()[5] = true;
}
}
/** Creates a Builder by copying an existing TaskFinished instance */
private Builder(org.apache.hadoop.mapreduce.jobhistory.TaskFinished other) {
super(org.apache.hadoop.mapreduce.jobhistory.TaskFinished.SCHEMA$);
if (isValidValue(fields()[0], other.taskid)) {
this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.taskType)) {
this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.finishTime)) {
this.finishTime = data().deepCopy(fields()[2].schema(), other.finishTime);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.status)) {
this.status = data().deepCopy(fields()[3].schema(), other.status);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.counters)) {
this.counters = data().deepCopy(fields()[4].schema(), other.counters);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.successfulAttemptId)) {
this.successfulAttemptId = data().deepCopy(fields()[5].schema(), other.successfulAttemptId);
fieldSetFlags()[5] = true;
}
}
/** Gets the value of the 'taskid' field */
public java.lang.CharSequence getTaskid() {
return taskid;
}
/** Sets the value of the 'taskid' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder setTaskid(java.lang.CharSequence value) {
validate(fields()[0], value);
this.taskid = value;
fieldSetFlags()[0] = true;
return this;
}
/** Checks whether the 'taskid' field has been set */
public boolean hasTaskid() {
return fieldSetFlags()[0];
}
/** Clears the value of the 'taskid' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder clearTaskid() {
taskid = null;
fieldSetFlags()[0] = false;
return this;
}
/** Gets the value of the 'taskType' field */
public java.lang.CharSequence getTaskType() {
return taskType;
}
/** Sets the value of the 'taskType' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder setTaskType(java.lang.CharSequence value) {
validate(fields()[1], value);
this.taskType = value;
fieldSetFlags()[1] = true;
return this;
}
/** Checks whether the 'taskType' field has been set */
public boolean hasTaskType() {
return fieldSetFlags()[1];
}
/** Clears the value of the 'taskType' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder clearTaskType() {
taskType = null;
fieldSetFlags()[1] = false;
return this;
}
/** Gets the value of the 'finishTime' field */
public java.lang.Long getFinishTime() {
return finishTime;
}
/** Sets the value of the 'finishTime' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder setFinishTime(long value) {
validate(fields()[2], value);
this.finishTime = value;
fieldSetFlags()[2] = true;
return this;
}
/** Checks whether the 'finishTime' field has been set */
public boolean hasFinishTime() {
return fieldSetFlags()[2];
}
/** Clears the value of the 'finishTime' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder clearFinishTime() {
fieldSetFlags()[2] = false;
return this;
}
/** Gets the value of the 'status' field */
public java.lang.CharSequence getStatus() {
return status;
}
/** Sets the value of the 'status' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder setStatus(java.lang.CharSequence value) {
validate(fields()[3], value);
this.status = value;
fieldSetFlags()[3] = true;
return this;
}
/** Checks whether the 'status' field has been set */
public boolean hasStatus() {
return fieldSetFlags()[3];
}
/** Clears the value of the 'status' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder clearStatus() {
status = null;
fieldSetFlags()[3] = false;
return this;
}
/** Gets the value of the 'counters' field */
public org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
return counters;
}
/** Sets the value of the 'counters' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder setCounters(org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
validate(fields()[4], value);
this.counters = value;
fieldSetFlags()[4] = true;
return this;
}
/** Checks whether the 'counters' field has been set */
public boolean hasCounters() {
return fieldSetFlags()[4];
}
/** Clears the value of the 'counters' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder clearCounters() {
counters = null;
fieldSetFlags()[4] = false;
return this;
}
/** Gets the value of the 'successfulAttemptId' field */
public java.lang.CharSequence getSuccessfulAttemptId() {
return successfulAttemptId;
}
/** Sets the value of the 'successfulAttemptId' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder setSuccessfulAttemptId(java.lang.CharSequence value) {
validate(fields()[5], value);
this.successfulAttemptId = value;
fieldSetFlags()[5] = true;
return this;
}
/** Checks whether the 'successfulAttemptId' field has been set */
public boolean hasSuccessfulAttemptId() {
return fieldSetFlags()[5];
}
/** Clears the value of the 'successfulAttemptId' field */
public org.apache.hadoop.mapreduce.jobhistory.TaskFinished.Builder clearSuccessfulAttemptId() {
successfulAttemptId = null;
fieldSetFlags()[5] = false;
return this;
}
@Override
public TaskFinished build() {
try {
TaskFinished record = new TaskFinished();
record.taskid = fieldSetFlags()[0] ? this.taskid : (java.lang.CharSequence) defaultValue(fields()[0]);
record.taskType = fieldSetFlags()[1] ? this.taskType : (java.lang.CharSequence) defaultValue(fields()[1]);
record.finishTime = fieldSetFlags()[2] ? this.finishTime : (java.lang.Long) defaultValue(fields()[2]);
record.status = fieldSetFlags()[3] ? this.status : (java.lang.CharSequence) defaultValue(fields()[3]);
record.counters = fieldSetFlags()[4] ? this.counters : (org.apache.hadoop.mapreduce.jobhistory.JhCounters) defaultValue(fields()[4]);
record.successfulAttemptId = fieldSetFlags()[5] ? this.successfulAttemptId : (java.lang.CharSequence) defaultValue(fields()[5]);
return record;
} catch (Exception e) {
throw new io.prestosql.hadoop.$internal.org.apache.avro.AvroRuntimeException(e);
}
}
}
}
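
Usage sketch (not part of the generated source). The snippet below shows one way to build and binary-encode a TaskFinished event, following the Javadoc advice to prefer newBuilder() over the default constructor so that the optional successfulAttemptId field picks up its schema default of null. It assumes that JhCounters and JhCounterGroup are the companion Avro-generated records referenced by this class and expose the usual generated builder methods, and that the shaded artifact relocates the rest of the Avro runtime (SpecificDatumWriter, EncoderFactory, BinaryEncoder) under the same io.prestosql.hadoop.$internal prefix used throughout this file; the task id and counter group name are illustrative placeholders.

import java.io.ByteArrayOutputStream;
import java.util.Collections;

import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
import org.apache.hadoop.mapreduce.jobhistory.TaskFinished;

import io.prestosql.hadoop.$internal.org.apache.avro.io.BinaryEncoder;
import io.prestosql.hadoop.$internal.org.apache.avro.io.EncoderFactory;
import io.prestosql.hadoop.$internal.org.apache.avro.specific.SpecificDatumWriter;

public class TaskFinishedExample {
  public static void main(String[] args) throws Exception {
    // Assumed: JhCounters is the Avro-generated record referenced by the
    // 'counters' field and follows the same builder pattern as TaskFinished.
    JhCounters counters = JhCounters.newBuilder()
        .setName("COUNTERS")
        .setGroups(Collections.<JhCounterGroup>emptyList())
        .build();

    // newBuilder() applies schema defaults for unset fields, so the optional
    // successfulAttemptId defaults to null here.
    TaskFinished event = TaskFinished.newBuilder()
        .setTaskid("task_1412931576549_0001_m_000000")   // illustrative id
        .setTaskType("MAP")
        .setFinishTime(System.currentTimeMillis())
        .setStatus("SUCCEEDED")
        .setCounters(counters)
        .build();

    // Serialize with the shaded Avro runtime (class locations assumed to follow
    // the io.prestosql.hadoop.$internal relocation seen in this file).
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    SpecificDatumWriter<TaskFinished> writer =
        new SpecificDatumWriter<TaskFinished>(TaskFinished.getClassSchema());
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(event, encoder);
    encoder.flush();

    System.out.println("Serialized " + out.size() + " bytes; status=" + event.getStatus());
  }
}

Reading the event back would mirror this with the corresponding SpecificDatumReader and DecoderFactory classes, assuming they are relocated under the same prefix.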