org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished (Avro-generated Java source)

/**
 * Autogenerated by Avro
 * 
 * DO NOT EDIT DIRECTLY
 */
package org.apache.hadoop.mapreduce.jobhistory;  
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class ReduceAttemptFinished extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ReduceAttemptFinished\",\"namespace\":\"org.apache.hadoop.mapreduce.jobhistory\",\"fields\":[{\"name\":\"taskid\",\"type\":\"string\"},{\"name\":\"attemptId\",\"type\":\"string\"},{\"name\":\"taskType\",\"type\":\"string\"},{\"name\":\"taskStatus\",\"type\":\"string\"},{\"name\":\"shuffleFinishTime\",\"type\":\"long\"},{\"name\":\"sortFinishTime\",\"type\":\"long\"},{\"name\":\"finishTime\",\"type\":\"long\"},{\"name\":\"hostname\",\"type\":\"string\"},{\"name\":\"port\",\"type\":\"int\"},{\"name\":\"rackname\",\"type\":\"string\"},{\"name\":\"state\",\"type\":\"string\"},{\"name\":\"counters\",\"type\":{\"type\":\"record\",\"name\":\"JhCounters\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"groups\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounterGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"counts\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"JhCounter\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"displayName\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"long\"}]}}}]}}}]}},{\"name\":\"clockSplits\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"cpuUsages\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"vMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"physMemKbytes\",\"type\":{\"type\":\"array\",\"items\":\"int\"}}]}");
  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
  @Deprecated public java.lang.CharSequence taskid;
  @Deprecated public java.lang.CharSequence attemptId;
  @Deprecated public java.lang.CharSequence taskType;
  @Deprecated public java.lang.CharSequence taskStatus;
  @Deprecated public long shuffleFinishTime;
  @Deprecated public long sortFinishTime;
  @Deprecated public long finishTime;
  @Deprecated public java.lang.CharSequence hostname;
  @Deprecated public int port;
  @Deprecated public java.lang.CharSequence rackname;
  @Deprecated public java.lang.CharSequence state;
  @Deprecated public org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
  @Deprecated public java.util.List<java.lang.Integer> clockSplits;
  @Deprecated public java.util.List<java.lang.Integer> cpuUsages;
  @Deprecated public java.util.List<java.lang.Integer> vMemKbytes;
  @Deprecated public java.util.List<java.lang.Integer> physMemKbytes;

  /**
   * Default constructor.  Note that this does not initialize fields
   * to their default values from the schema.  If that is desired then
   * one should use newBuilder(). 
   */
  public ReduceAttemptFinished() {}

  /**
   * All-args constructor.
   */
  public ReduceAttemptFinished(java.lang.CharSequence taskid, java.lang.CharSequence attemptId, java.lang.CharSequence taskType, java.lang.CharSequence taskStatus, java.lang.Long shuffleFinishTime, java.lang.Long sortFinishTime, java.lang.Long finishTime, java.lang.CharSequence hostname, java.lang.Integer port, java.lang.CharSequence rackname, java.lang.CharSequence state, org.apache.hadoop.mapreduce.jobhistory.JhCounters counters, java.util.List<java.lang.Integer> clockSplits, java.util.List<java.lang.Integer> cpuUsages, java.util.List<java.lang.Integer> vMemKbytes, java.util.List<java.lang.Integer> physMemKbytes) {
    this.taskid = taskid;
    this.attemptId = attemptId;
    this.taskType = taskType;
    this.taskStatus = taskStatus;
    this.shuffleFinishTime = shuffleFinishTime;
    this.sortFinishTime = sortFinishTime;
    this.finishTime = finishTime;
    this.hostname = hostname;
    this.port = port;
    this.rackname = rackname;
    this.state = state;
    this.counters = counters;
    this.clockSplits = clockSplits;
    this.cpuUsages = cpuUsages;
    this.vMemKbytes = vMemKbytes;
    this.physMemKbytes = physMemKbytes;
  }

  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
  // Used by DatumWriter.  Applications should not call. 
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0: return taskid;
    case 1: return attemptId;
    case 2: return taskType;
    case 3: return taskStatus;
    case 4: return shuffleFinishTime;
    case 5: return sortFinishTime;
    case 6: return finishTime;
    case 7: return hostname;
    case 8: return port;
    case 9: return rackname;
    case 10: return state;
    case 11: return counters;
    case 12: return clockSplits;
    case 13: return cpuUsages;
    case 14: return vMemKbytes;
    case 15: return physMemKbytes;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }
  // Used by DatumReader.  Applications should not call. 
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
    case 0: taskid = (java.lang.CharSequence)value$; break;
    case 1: attemptId = (java.lang.CharSequence)value$; break;
    case 2: taskType = (java.lang.CharSequence)value$; break;
    case 3: taskStatus = (java.lang.CharSequence)value$; break;
    case 4: shuffleFinishTime = (java.lang.Long)value$; break;
    case 5: sortFinishTime = (java.lang.Long)value$; break;
    case 6: finishTime = (java.lang.Long)value$; break;
    case 7: hostname = (java.lang.CharSequence)value$; break;
    case 8: port = (java.lang.Integer)value$; break;
    case 9: rackname = (java.lang.CharSequence)value$; break;
    case 10: state = (java.lang.CharSequence)value$; break;
    case 11: counters = (org.apache.hadoop.mapreduce.jobhistory.JhCounters)value$; break;
    case 12: clockSplits = (java.util.List<java.lang.Integer>)value$; break;
    case 13: cpuUsages = (java.util.List<java.lang.Integer>)value$; break;
    case 14: vMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
    case 15: physMemKbytes = (java.util.List<java.lang.Integer>)value$; break;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  /**
   * Gets the value of the 'taskid' field.
   */
  public java.lang.CharSequence getTaskid() {
    return taskid;
  }

  /**
   * Sets the value of the 'taskid' field.
   * @param value the value to set.
   */
  public void setTaskid(java.lang.CharSequence value) {
    this.taskid = value;
  }

  /**
   * Gets the value of the 'attemptId' field.
   */
  public java.lang.CharSequence getAttemptId() {
    return attemptId;
  }

  /**
   * Sets the value of the 'attemptId' field.
   * @param value the value to set.
   */
  public void setAttemptId(java.lang.CharSequence value) {
    this.attemptId = value;
  }

  /**
   * Gets the value of the 'taskType' field.
   */
  public java.lang.CharSequence getTaskType() {
    return taskType;
  }

  /**
   * Sets the value of the 'taskType' field.
   * @param value the value to set.
   */
  public void setTaskType(java.lang.CharSequence value) {
    this.taskType = value;
  }

  /**
   * Gets the value of the 'taskStatus' field.
   */
  public java.lang.CharSequence getTaskStatus() {
    return taskStatus;
  }

  /**
   * Sets the value of the 'taskStatus' field.
   * @param value the value to set.
   */
  public void setTaskStatus(java.lang.CharSequence value) {
    this.taskStatus = value;
  }

  /**
   * Gets the value of the 'shuffleFinishTime' field.
   */
  public java.lang.Long getShuffleFinishTime() {
    return shuffleFinishTime;
  }

  /**
   * Sets the value of the 'shuffleFinishTime' field.
   * @param value the value to set.
   */
  public void setShuffleFinishTime(java.lang.Long value) {
    this.shuffleFinishTime = value;
  }

  /**
   * Gets the value of the 'sortFinishTime' field.
   */
  public java.lang.Long getSortFinishTime() {
    return sortFinishTime;
  }

  /**
   * Sets the value of the 'sortFinishTime' field.
   * @param value the value to set.
   */
  public void setSortFinishTime(java.lang.Long value) {
    this.sortFinishTime = value;
  }

  /**
   * Gets the value of the 'finishTime' field.
   */
  public java.lang.Long getFinishTime() {
    return finishTime;
  }

  /**
   * Sets the value of the 'finishTime' field.
   * @param value the value to set.
   */
  public void setFinishTime(java.lang.Long value) {
    this.finishTime = value;
  }

  /**
   * Gets the value of the 'hostname' field.
   */
  public java.lang.CharSequence getHostname() {
    return hostname;
  }

  /**
   * Sets the value of the 'hostname' field.
   * @param value the value to set.
   */
  public void setHostname(java.lang.CharSequence value) {
    this.hostname = value;
  }

  /**
   * Gets the value of the 'port' field.
   */
  public java.lang.Integer getPort() {
    return port;
  }

  /**
   * Sets the value of the 'port' field.
   * @param value the value to set.
   */
  public void setPort(java.lang.Integer value) {
    this.port = value;
  }

  /**
   * Gets the value of the 'rackname' field.
   */
  public java.lang.CharSequence getRackname() {
    return rackname;
  }

  /**
   * Sets the value of the 'rackname' field.
   * @param value the value to set.
   */
  public void setRackname(java.lang.CharSequence value) {
    this.rackname = value;
  }

  /**
   * Gets the value of the 'state' field.
   */
  public java.lang.CharSequence getState() {
    return state;
  }

  /**
   * Sets the value of the 'state' field.
   * @param value the value to set.
   */
  public void setState(java.lang.CharSequence value) {
    this.state = value;
  }

  /**
   * Gets the value of the 'counters' field.
   */
  public org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
    return counters;
  }

  /**
   * Sets the value of the 'counters' field.
   * @param value the value to set.
   */
  public void setCounters(org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
    this.counters = value;
  }

  /**
   * Gets the value of the 'clockSplits' field.
   */
  public java.util.List<java.lang.Integer> getClockSplits() {
    return clockSplits;
  }

  /**
   * Sets the value of the 'clockSplits' field.
   * @param value the value to set.
   */
  public void setClockSplits(java.util.List<java.lang.Integer> value) {
    this.clockSplits = value;
  }

  /**
   * Gets the value of the 'cpuUsages' field.
   */
  public java.util.List<java.lang.Integer> getCpuUsages() {
    return cpuUsages;
  }

  /**
   * Sets the value of the 'cpuUsages' field.
   * @param value the value to set.
   */
  public void setCpuUsages(java.util.List<java.lang.Integer> value) {
    this.cpuUsages = value;
  }

  /**
   * Gets the value of the 'vMemKbytes' field.
   */
  public java.util.List<java.lang.Integer> getVMemKbytes() {
    return vMemKbytes;
  }

  /**
   * Sets the value of the 'vMemKbytes' field.
   * @param value the value to set.
   */
  public void setVMemKbytes(java.util.List<java.lang.Integer> value) {
    this.vMemKbytes = value;
  }

  /**
   * Gets the value of the 'physMemKbytes' field.
   */
  public java.util.List<java.lang.Integer> getPhysMemKbytes() {
    return physMemKbytes;
  }

  /**
   * Sets the value of the 'physMemKbytes' field.
   * @param value the value to set.
   */
  public void setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
    this.physMemKbytes = value;
  }

  /** Creates a new ReduceAttemptFinished RecordBuilder */
  public static org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder newBuilder() {
    return new org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder();
  }
  
  /** Creates a new ReduceAttemptFinished RecordBuilder by copying an existing Builder */
  public static org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder newBuilder(org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder other) {
    return new org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder(other);
  }
  
  /** Creates a new ReduceAttemptFinished RecordBuilder by copying an existing ReduceAttemptFinished instance */
  public static org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder newBuilder(org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished other) {
    return new org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder(other);
  }
  
  /**
   * RecordBuilder for ReduceAttemptFinished instances.
   */
  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<ReduceAttemptFinished>
    implements org.apache.avro.data.RecordBuilder<ReduceAttemptFinished> {

    private java.lang.CharSequence taskid;
    private java.lang.CharSequence attemptId;
    private java.lang.CharSequence taskType;
    private java.lang.CharSequence taskStatus;
    private long shuffleFinishTime;
    private long sortFinishTime;
    private long finishTime;
    private java.lang.CharSequence hostname;
    private int port;
    private java.lang.CharSequence rackname;
    private java.lang.CharSequence state;
    private org.apache.hadoop.mapreduce.jobhistory.JhCounters counters;
    private java.util.List<java.lang.Integer> clockSplits;
    private java.util.List<java.lang.Integer> cpuUsages;
    private java.util.List<java.lang.Integer> vMemKbytes;
    private java.util.List<java.lang.Integer> physMemKbytes;

    /** Creates a new Builder */
    private Builder() {
      super(org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.SCHEMA$);
    }
    
    /** Creates a Builder by copying an existing Builder */
    private Builder(org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder other) {
      super(other);
      if (isValidValue(fields()[0], other.taskid)) {
        this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.attemptId)) {
        this.attemptId = data().deepCopy(fields()[1].schema(), other.attemptId);
        fieldSetFlags()[1] = true;
      }
      if (isValidValue(fields()[2], other.taskType)) {
        this.taskType = data().deepCopy(fields()[2].schema(), other.taskType);
        fieldSetFlags()[2] = true;
      }
      if (isValidValue(fields()[3], other.taskStatus)) {
        this.taskStatus = data().deepCopy(fields()[3].schema(), other.taskStatus);
        fieldSetFlags()[3] = true;
      }
      if (isValidValue(fields()[4], other.shuffleFinishTime)) {
        this.shuffleFinishTime = data().deepCopy(fields()[4].schema(), other.shuffleFinishTime);
        fieldSetFlags()[4] = true;
      }
      if (isValidValue(fields()[5], other.sortFinishTime)) {
        this.sortFinishTime = data().deepCopy(fields()[5].schema(), other.sortFinishTime);
        fieldSetFlags()[5] = true;
      }
      if (isValidValue(fields()[6], other.finishTime)) {
        this.finishTime = data().deepCopy(fields()[6].schema(), other.finishTime);
        fieldSetFlags()[6] = true;
      }
      if (isValidValue(fields()[7], other.hostname)) {
        this.hostname = data().deepCopy(fields()[7].schema(), other.hostname);
        fieldSetFlags()[7] = true;
      }
      if (isValidValue(fields()[8], other.port)) {
        this.port = data().deepCopy(fields()[8].schema(), other.port);
        fieldSetFlags()[8] = true;
      }
      if (isValidValue(fields()[9], other.rackname)) {
        this.rackname = data().deepCopy(fields()[9].schema(), other.rackname);
        fieldSetFlags()[9] = true;
      }
      if (isValidValue(fields()[10], other.state)) {
        this.state = data().deepCopy(fields()[10].schema(), other.state);
        fieldSetFlags()[10] = true;
      }
      if (isValidValue(fields()[11], other.counters)) {
        this.counters = data().deepCopy(fields()[11].schema(), other.counters);
        fieldSetFlags()[11] = true;
      }
      if (isValidValue(fields()[12], other.clockSplits)) {
        this.clockSplits = data().deepCopy(fields()[12].schema(), other.clockSplits);
        fieldSetFlags()[12] = true;
      }
      if (isValidValue(fields()[13], other.cpuUsages)) {
        this.cpuUsages = data().deepCopy(fields()[13].schema(), other.cpuUsages);
        fieldSetFlags()[13] = true;
      }
      if (isValidValue(fields()[14], other.vMemKbytes)) {
        this.vMemKbytes = data().deepCopy(fields()[14].schema(), other.vMemKbytes);
        fieldSetFlags()[14] = true;
      }
      if (isValidValue(fields()[15], other.physMemKbytes)) {
        this.physMemKbytes = data().deepCopy(fields()[15].schema(), other.physMemKbytes);
        fieldSetFlags()[15] = true;
      }
    }
    
    /** Creates a Builder by copying an existing ReduceAttemptFinished instance */
    private Builder(org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished other) {
      super(org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.SCHEMA$);
      if (isValidValue(fields()[0], other.taskid)) {
        this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.attemptId)) {
        this.attemptId = data().deepCopy(fields()[1].schema(), other.attemptId);
        fieldSetFlags()[1] = true;
      }
      if (isValidValue(fields()[2], other.taskType)) {
        this.taskType = data().deepCopy(fields()[2].schema(), other.taskType);
        fieldSetFlags()[2] = true;
      }
      if (isValidValue(fields()[3], other.taskStatus)) {
        this.taskStatus = data().deepCopy(fields()[3].schema(), other.taskStatus);
        fieldSetFlags()[3] = true;
      }
      if (isValidValue(fields()[4], other.shuffleFinishTime)) {
        this.shuffleFinishTime = data().deepCopy(fields()[4].schema(), other.shuffleFinishTime);
        fieldSetFlags()[4] = true;
      }
      if (isValidValue(fields()[5], other.sortFinishTime)) {
        this.sortFinishTime = data().deepCopy(fields()[5].schema(), other.sortFinishTime);
        fieldSetFlags()[5] = true;
      }
      if (isValidValue(fields()[6], other.finishTime)) {
        this.finishTime = data().deepCopy(fields()[6].schema(), other.finishTime);
        fieldSetFlags()[6] = true;
      }
      if (isValidValue(fields()[7], other.hostname)) {
        this.hostname = data().deepCopy(fields()[7].schema(), other.hostname);
        fieldSetFlags()[7] = true;
      }
      if (isValidValue(fields()[8], other.port)) {
        this.port = data().deepCopy(fields()[8].schema(), other.port);
        fieldSetFlags()[8] = true;
      }
      if (isValidValue(fields()[9], other.rackname)) {
        this.rackname = data().deepCopy(fields()[9].schema(), other.rackname);
        fieldSetFlags()[9] = true;
      }
      if (isValidValue(fields()[10], other.state)) {
        this.state = data().deepCopy(fields()[10].schema(), other.state);
        fieldSetFlags()[10] = true;
      }
      if (isValidValue(fields()[11], other.counters)) {
        this.counters = data().deepCopy(fields()[11].schema(), other.counters);
        fieldSetFlags()[11] = true;
      }
      if (isValidValue(fields()[12], other.clockSplits)) {
        this.clockSplits = data().deepCopy(fields()[12].schema(), other.clockSplits);
        fieldSetFlags()[12] = true;
      }
      if (isValidValue(fields()[13], other.cpuUsages)) {
        this.cpuUsages = data().deepCopy(fields()[13].schema(), other.cpuUsages);
        fieldSetFlags()[13] = true;
      }
      if (isValidValue(fields()[14], other.vMemKbytes)) {
        this.vMemKbytes = data().deepCopy(fields()[14].schema(), other.vMemKbytes);
        fieldSetFlags()[14] = true;
      }
      if (isValidValue(fields()[15], other.physMemKbytes)) {
        this.physMemKbytes = data().deepCopy(fields()[15].schema(), other.physMemKbytes);
        fieldSetFlags()[15] = true;
      }
    }

    /** Gets the value of the 'taskid' field */
    public java.lang.CharSequence getTaskid() {
      return taskid;
    }
    
    /** Sets the value of the 'taskid' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setTaskid(java.lang.CharSequence value) {
      validate(fields()[0], value);
      this.taskid = value;
      fieldSetFlags()[0] = true;
      return this; 
    }
    
    /** Checks whether the 'taskid' field has been set */
    public boolean hasTaskid() {
      return fieldSetFlags()[0];
    }
    
    /** Clears the value of the 'taskid' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearTaskid() {
      taskid = null;
      fieldSetFlags()[0] = false;
      return this;
    }

    /** Gets the value of the 'attemptId' field */
    public java.lang.CharSequence getAttemptId() {
      return attemptId;
    }
    
    /** Sets the value of the 'attemptId' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setAttemptId(java.lang.CharSequence value) {
      validate(fields()[1], value);
      this.attemptId = value;
      fieldSetFlags()[1] = true;
      return this; 
    }
    
    /** Checks whether the 'attemptId' field has been set */
    public boolean hasAttemptId() {
      return fieldSetFlags()[1];
    }
    
    /** Clears the value of the 'attemptId' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearAttemptId() {
      attemptId = null;
      fieldSetFlags()[1] = false;
      return this;
    }

    /** Gets the value of the 'taskType' field */
    public java.lang.CharSequence getTaskType() {
      return taskType;
    }
    
    /** Sets the value of the 'taskType' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setTaskType(java.lang.CharSequence value) {
      validate(fields()[2], value);
      this.taskType = value;
      fieldSetFlags()[2] = true;
      return this; 
    }
    
    /** Checks whether the 'taskType' field has been set */
    public boolean hasTaskType() {
      return fieldSetFlags()[2];
    }
    
    /** Clears the value of the 'taskType' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearTaskType() {
      taskType = null;
      fieldSetFlags()[2] = false;
      return this;
    }

    /** Gets the value of the 'taskStatus' field */
    public java.lang.CharSequence getTaskStatus() {
      return taskStatus;
    }
    
    /** Sets the value of the 'taskStatus' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setTaskStatus(java.lang.CharSequence value) {
      validate(fields()[3], value);
      this.taskStatus = value;
      fieldSetFlags()[3] = true;
      return this; 
    }
    
    /** Checks whether the 'taskStatus' field has been set */
    public boolean hasTaskStatus() {
      return fieldSetFlags()[3];
    }
    
    /** Clears the value of the 'taskStatus' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearTaskStatus() {
      taskStatus = null;
      fieldSetFlags()[3] = false;
      return this;
    }

    /** Gets the value of the 'shuffleFinishTime' field */
    public java.lang.Long getShuffleFinishTime() {
      return shuffleFinishTime;
    }
    
    /** Sets the value of the 'shuffleFinishTime' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setShuffleFinishTime(long value) {
      validate(fields()[4], value);
      this.shuffleFinishTime = value;
      fieldSetFlags()[4] = true;
      return this; 
    }
    
    /** Checks whether the 'shuffleFinishTime' field has been set */
    public boolean hasShuffleFinishTime() {
      return fieldSetFlags()[4];
    }
    
    /** Clears the value of the 'shuffleFinishTime' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearShuffleFinishTime() {
      fieldSetFlags()[4] = false;
      return this;
    }

    /** Gets the value of the 'sortFinishTime' field */
    public java.lang.Long getSortFinishTime() {
      return sortFinishTime;
    }
    
    /** Sets the value of the 'sortFinishTime' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setSortFinishTime(long value) {
      validate(fields()[5], value);
      this.sortFinishTime = value;
      fieldSetFlags()[5] = true;
      return this; 
    }
    
    /** Checks whether the 'sortFinishTime' field has been set */
    public boolean hasSortFinishTime() {
      return fieldSetFlags()[5];
    }
    
    /** Clears the value of the 'sortFinishTime' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearSortFinishTime() {
      fieldSetFlags()[5] = false;
      return this;
    }

    /** Gets the value of the 'finishTime' field */
    public java.lang.Long getFinishTime() {
      return finishTime;
    }
    
    /** Sets the value of the 'finishTime' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setFinishTime(long value) {
      validate(fields()[6], value);
      this.finishTime = value;
      fieldSetFlags()[6] = true;
      return this; 
    }
    
    /** Checks whether the 'finishTime' field has been set */
    public boolean hasFinishTime() {
      return fieldSetFlags()[6];
    }
    
    /** Clears the value of the 'finishTime' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearFinishTime() {
      fieldSetFlags()[6] = false;
      return this;
    }

    /** Gets the value of the 'hostname' field */
    public java.lang.CharSequence getHostname() {
      return hostname;
    }
    
    /** Sets the value of the 'hostname' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setHostname(java.lang.CharSequence value) {
      validate(fields()[7], value);
      this.hostname = value;
      fieldSetFlags()[7] = true;
      return this; 
    }
    
    /** Checks whether the 'hostname' field has been set */
    public boolean hasHostname() {
      return fieldSetFlags()[7];
    }
    
    /** Clears the value of the 'hostname' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearHostname() {
      hostname = null;
      fieldSetFlags()[7] = false;
      return this;
    }

    /** Gets the value of the 'port' field */
    public java.lang.Integer getPort() {
      return port;
    }
    
    /** Sets the value of the 'port' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setPort(int value) {
      validate(fields()[8], value);
      this.port = value;
      fieldSetFlags()[8] = true;
      return this; 
    }
    
    /** Checks whether the 'port' field has been set */
    public boolean hasPort() {
      return fieldSetFlags()[8];
    }
    
    /** Clears the value of the 'port' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearPort() {
      fieldSetFlags()[8] = false;
      return this;
    }

    /** Gets the value of the 'rackname' field */
    public java.lang.CharSequence getRackname() {
      return rackname;
    }
    
    /** Sets the value of the 'rackname' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setRackname(java.lang.CharSequence value) {
      validate(fields()[9], value);
      this.rackname = value;
      fieldSetFlags()[9] = true;
      return this; 
    }
    
    /** Checks whether the 'rackname' field has been set */
    public boolean hasRackname() {
      return fieldSetFlags()[9];
    }
    
    /** Clears the value of the 'rackname' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearRackname() {
      rackname = null;
      fieldSetFlags()[9] = false;
      return this;
    }

    /** Gets the value of the 'state' field */
    public java.lang.CharSequence getState() {
      return state;
    }
    
    /** Sets the value of the 'state' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setState(java.lang.CharSequence value) {
      validate(fields()[10], value);
      this.state = value;
      fieldSetFlags()[10] = true;
      return this; 
    }
    
    /** Checks whether the 'state' field has been set */
    public boolean hasState() {
      return fieldSetFlags()[10];
    }
    
    /** Clears the value of the 'state' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearState() {
      state = null;
      fieldSetFlags()[10] = false;
      return this;
    }

    /** Gets the value of the 'counters' field */
    public org.apache.hadoop.mapreduce.jobhistory.JhCounters getCounters() {
      return counters;
    }
    
    /** Sets the value of the 'counters' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setCounters(org.apache.hadoop.mapreduce.jobhistory.JhCounters value) {
      validate(fields()[11], value);
      this.counters = value;
      fieldSetFlags()[11] = true;
      return this; 
    }
    
    /** Checks whether the 'counters' field has been set */
    public boolean hasCounters() {
      return fieldSetFlags()[11];
    }
    
    /** Clears the value of the 'counters' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearCounters() {
      counters = null;
      fieldSetFlags()[11] = false;
      return this;
    }

    /** Gets the value of the 'clockSplits' field */
    public java.util.List<java.lang.Integer> getClockSplits() {
      return clockSplits;
    }
    
    /** Sets the value of the 'clockSplits' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setClockSplits(java.util.List<java.lang.Integer> value) {
      validate(fields()[12], value);
      this.clockSplits = value;
      fieldSetFlags()[12] = true;
      return this; 
    }
    
    /** Checks whether the 'clockSplits' field has been set */
    public boolean hasClockSplits() {
      return fieldSetFlags()[12];
    }
    
    /** Clears the value of the 'clockSplits' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearClockSplits() {
      clockSplits = null;
      fieldSetFlags()[12] = false;
      return this;
    }

    /** Gets the value of the 'cpuUsages' field */
    public java.util.List<java.lang.Integer> getCpuUsages() {
      return cpuUsages;
    }
    
    /** Sets the value of the 'cpuUsages' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setCpuUsages(java.util.List<java.lang.Integer> value) {
      validate(fields()[13], value);
      this.cpuUsages = value;
      fieldSetFlags()[13] = true;
      return this; 
    }
    
    /** Checks whether the 'cpuUsages' field has been set */
    public boolean hasCpuUsages() {
      return fieldSetFlags()[13];
    }
    
    /** Clears the value of the 'cpuUsages' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearCpuUsages() {
      cpuUsages = null;
      fieldSetFlags()[13] = false;
      return this;
    }

    /** Gets the value of the 'vMemKbytes' field */
    public java.util.List<java.lang.Integer> getVMemKbytes() {
      return vMemKbytes;
    }
    
    /** Sets the value of the 'vMemKbytes' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setVMemKbytes(java.util.List<java.lang.Integer> value) {
      validate(fields()[14], value);
      this.vMemKbytes = value;
      fieldSetFlags()[14] = true;
      return this; 
    }
    
    /** Checks whether the 'vMemKbytes' field has been set */
    public boolean hasVMemKbytes() {
      return fieldSetFlags()[14];
    }
    
    /** Clears the value of the 'vMemKbytes' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearVMemKbytes() {
      vMemKbytes = null;
      fieldSetFlags()[14] = false;
      return this;
    }

    /** Gets the value of the 'physMemKbytes' field */
    public java.util.List<java.lang.Integer> getPhysMemKbytes() {
      return physMemKbytes;
    }
    
    /** Sets the value of the 'physMemKbytes' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder setPhysMemKbytes(java.util.List<java.lang.Integer> value) {
      validate(fields()[15], value);
      this.physMemKbytes = value;
      fieldSetFlags()[15] = true;
      return this; 
    }
    
    /** Checks whether the 'physMemKbytes' field has been set */
    public boolean hasPhysMemKbytes() {
      return fieldSetFlags()[15];
    }
    
    /** Clears the value of the 'physMemKbytes' field */
    public org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished.Builder clearPhysMemKbytes() {
      physMemKbytes = null;
      fieldSetFlags()[15] = false;
      return this;
    }

    @Override
    public ReduceAttemptFinished build() {
      try {
        ReduceAttemptFinished record = new ReduceAttemptFinished();
        record.taskid = fieldSetFlags()[0] ? this.taskid : (java.lang.CharSequence) defaultValue(fields()[0]);
        record.attemptId = fieldSetFlags()[1] ? this.attemptId : (java.lang.CharSequence) defaultValue(fields()[1]);
        record.taskType = fieldSetFlags()[2] ? this.taskType : (java.lang.CharSequence) defaultValue(fields()[2]);
        record.taskStatus = fieldSetFlags()[3] ? this.taskStatus : (java.lang.CharSequence) defaultValue(fields()[3]);
        record.shuffleFinishTime = fieldSetFlags()[4] ? this.shuffleFinishTime : (java.lang.Long) defaultValue(fields()[4]);
        record.sortFinishTime = fieldSetFlags()[5] ? this.sortFinishTime : (java.lang.Long) defaultValue(fields()[5]);
        record.finishTime = fieldSetFlags()[6] ? this.finishTime : (java.lang.Long) defaultValue(fields()[6]);
        record.hostname = fieldSetFlags()[7] ? this.hostname : (java.lang.CharSequence) defaultValue(fields()[7]);
        record.port = fieldSetFlags()[8] ? this.port : (java.lang.Integer) defaultValue(fields()[8]);
        record.rackname = fieldSetFlags()[9] ? this.rackname : (java.lang.CharSequence) defaultValue(fields()[9]);
        record.state = fieldSetFlags()[10] ? this.state : (java.lang.CharSequence) defaultValue(fields()[10]);
        record.counters = fieldSetFlags()[11] ? this.counters : (org.apache.hadoop.mapreduce.jobhistory.JhCounters) defaultValue(fields()[11]);
        record.clockSplits = fieldSetFlags()[12] ? this.clockSplits : (java.util.List<java.lang.Integer>) defaultValue(fields()[12]);
        record.cpuUsages = fieldSetFlags()[13] ? this.cpuUsages : (java.util.List<java.lang.Integer>) defaultValue(fields()[13]);
        record.vMemKbytes = fieldSetFlags()[14] ? this.vMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[14]);
        record.physMemKbytes = fieldSetFlags()[15] ? this.physMemKbytes : (java.util.List<java.lang.Integer>) defaultValue(fields()[15]);
        return record;
      } catch (Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
}
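
Usage sketch (not part of the generated file): a minimal example of how this record is typically built via the generated Builder and round-tripped through Avro binary encoding with the specific-record API. The task/attempt IDs, hostname, and metric values are made-up placeholders, and the JhCounters/JhCounterGroup builders are assumed to follow the same Avro-generated pattern as this class.

import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.Collections;

import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
import org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished;

public class ReduceAttemptFinishedExample {
  public static void main(String[] args) throws Exception {
    // The schema requires a counters record; an empty group list is enough for a demo.
    JhCounters counters = JhCounters.newBuilder()
        .setName("COUNTERS")
        .setGroups(Collections.<JhCounterGroup>emptyList())
        .build();

    // Build the event with the generated RecordBuilder; all fields are mandatory
    // because none of them declare a default in SCHEMA$. Values below are placeholders.
    ReduceAttemptFinished event = ReduceAttemptFinished.newBuilder()
        .setTaskid("task_1405_0001_r_000001")
        .setAttemptId("attempt_1405_0001_r_000001_0")
        .setTaskType("REDUCE")
        .setTaskStatus("SUCCEEDED")
        .setShuffleFinishTime(1000L)
        .setSortFinishTime(1500L)
        .setFinishTime(2000L)
        .setHostname("worker-01")
        .setPort(50060)
        .setRackname("/default-rack")
        .setState("reduce > reduce")
        .setCounters(counters)
        .setClockSplits(Arrays.asList(10, 20, 30))
        .setCpuUsages(Arrays.asList(5, 15, 25))
        .setVMemKbytes(Arrays.asList(1024, 2048, 4096))
        .setPhysMemKbytes(Arrays.asList(512, 1024, 2048))
        .build();

    // Serialize to Avro binary and read it back using the embedded schema.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    new SpecificDatumWriter<ReduceAttemptFinished>(ReduceAttemptFinished.class).write(event, encoder);
    encoder.flush();

    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
    ReduceAttemptFinished copy =
        new SpecificDatumReader<ReduceAttemptFinished>(ReduceAttemptFinished.class).read(null, decoder);
    System.out.println(copy.getHostname() + ":" + copy.getPort());
  }
}

Because build() falls back to defaultValue() for unset fields and this schema defines no defaults, omitting any setter above would make build() throw an AvroRuntimeException.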