org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata

/**
 * Autogenerated by Avro
 *
 * DO NOT EDIT DIRECTLY
 */
package org.apache.hudi.avro.model;

import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.util.Utf8;
import org.apache.avro.message.BinaryMessageEncoder;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.SchemaStore;

@org.apache.avro.specific.AvroGenerated
public class HoodieSavepointPartitionMetadata extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
  private static final long serialVersionUID = 6742396350755043336L;


  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"HoodieSavepointPartitionMetadata\",\"namespace\":\"org.apache.hudi.avro.model\",\"fields\":[{\"name\":\"partitionPath\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"savepointDataFile\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}}]}");
  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }

  private static final SpecificData MODEL$ = new SpecificData();

  private static final BinaryMessageEncoder<HoodieSavepointPartitionMetadata> ENCODER =
      new BinaryMessageEncoder<>(MODEL$, SCHEMA$);

  private static final BinaryMessageDecoder<HoodieSavepointPartitionMetadata> DECODER =
      new BinaryMessageDecoder<>(MODEL$, SCHEMA$);

  /**
   * Return the BinaryMessageEncoder instance used by this class.
   * @return the message encoder used by this class
   */
  public static BinaryMessageEncoder<HoodieSavepointPartitionMetadata> getEncoder() {
    return ENCODER;
  }

  /**
   * Return the BinaryMessageDecoder instance used by this class.
   * @return the message decoder used by this class
   */
  public static BinaryMessageDecoder<HoodieSavepointPartitionMetadata> getDecoder() {
    return DECODER;
  }

  /**
   * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
   * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
   * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
   */
  public static BinaryMessageDecoder<HoodieSavepointPartitionMetadata> createDecoder(SchemaStore resolver) {
    return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
  }

  /**
   * Serializes this HoodieSavepointPartitionMetadata to a ByteBuffer.
   * @return a buffer holding the serialized data for this instance
   * @throws java.io.IOException if this instance could not be serialized
   */
  public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
    return ENCODER.encode(this);
  }

  /**
   * Deserializes a HoodieSavepointPartitionMetadata from a ByteBuffer.
   * @param b a byte buffer holding serialized data for an instance of this class
   * @return a HoodieSavepointPartitionMetadata instance decoded from the given buffer
   * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
   */
  public static HoodieSavepointPartitionMetadata fromByteBuffer(
      java.nio.ByteBuffer b) throws java.io.IOException {
    return DECODER.decode(b);
  }

  private java.lang.String partitionPath;
  private java.util.List<java.lang.String> savepointDataFile;

  /**
   * Default constructor.  Note that this does not initialize fields
   * to their default values from the schema.  If that is desired then
   * one should use newBuilder().
   */
  public HoodieSavepointPartitionMetadata() {}

  /**
   * All-args constructor.
   * @param partitionPath The new value for partitionPath
   * @param savepointDataFile The new value for savepointDataFile
   */
  public HoodieSavepointPartitionMetadata(java.lang.String partitionPath, java.util.List<java.lang.String> savepointDataFile) {
    this.partitionPath = partitionPath;
    this.savepointDataFile = savepointDataFile;
  }

  @Override
  public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }

  @Override
  public org.apache.avro.Schema getSchema() { return SCHEMA$; }

  // Used by DatumWriter.  Applications should not call.
  @Override
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0: return partitionPath;
    case 1: return savepointDataFile;
    default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
    }
  }

  // Used by DatumReader.  Applications should not call.
  @Override
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
    case 0: partitionPath = value$ != null ? value$.toString() : null; break;
    case 1: savepointDataFile = (java.util.List<java.lang.String>)value$; break;
    default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
    }
  }

  /**
   * Gets the value of the 'partitionPath' field.
   * @return The value of the 'partitionPath' field.
   */
  public java.lang.String getPartitionPath() {
    return partitionPath;
  }


  /**
   * Sets the value of the 'partitionPath' field.
   * @param value the value to set.
   */
  public void setPartitionPath(java.lang.String value) {
    this.partitionPath = value;
  }

  /**
   * Gets the value of the 'savepointDataFile' field.
   * @return The value of the 'savepointDataFile' field.
   */
  public java.util.List<java.lang.String> getSavepointDataFile() {
    return savepointDataFile;
  }


  /**
   * Sets the value of the 'savepointDataFile' field.
   * @param value the value to set.
   */
  public void setSavepointDataFile(java.util.List<java.lang.String> value) {
    this.savepointDataFile = value;
  }

  /**
   * Creates a new HoodieSavepointPartitionMetadata RecordBuilder.
   * @return A new HoodieSavepointPartitionMetadata RecordBuilder
   */
  public static org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder newBuilder() {
    return new org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder();
  }

  /**
   * Creates a new HoodieSavepointPartitionMetadata RecordBuilder by copying an existing Builder.
   * @param other The existing builder to copy.
   * @return A new HoodieSavepointPartitionMetadata RecordBuilder
   */
  public static org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder newBuilder(org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder other) {
    if (other == null) {
      return new org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder();
    } else {
      return new org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder(other);
    }
  }

  /**
   * Creates a new HoodieSavepointPartitionMetadata RecordBuilder by copying an existing HoodieSavepointPartitionMetadata instance.
   * @param other The existing instance to copy.
   * @return A new HoodieSavepointPartitionMetadata RecordBuilder
   */
  public static org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder newBuilder(org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata other) {
    if (other == null) {
      return new org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder();
    } else {
      return new org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder(other);
    }
  }

  /**
   * RecordBuilder for HoodieSavepointPartitionMetadata instances.
   */
  @org.apache.avro.specific.AvroGenerated
  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<HoodieSavepointPartitionMetadata>
    implements org.apache.avro.data.RecordBuilder<HoodieSavepointPartitionMetadata> {

    private java.lang.String partitionPath;
    private java.util.List<java.lang.String> savepointDataFile;

    /** Creates a new Builder */
    private Builder() {
      super(SCHEMA$, MODEL$);
    }

    /**
     * Creates a Builder by copying an existing Builder.
     * @param other The existing Builder to copy.
     */
    private Builder(org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder other) {
      super(other);
      if (isValidValue(fields()[0], other.partitionPath)) {
        this.partitionPath = data().deepCopy(fields()[0].schema(), other.partitionPath);
        fieldSetFlags()[0] = other.fieldSetFlags()[0];
      }
      if (isValidValue(fields()[1], other.savepointDataFile)) {
        this.savepointDataFile = data().deepCopy(fields()[1].schema(), other.savepointDataFile);
        fieldSetFlags()[1] = other.fieldSetFlags()[1];
      }
    }

    /**
     * Creates a Builder by copying an existing HoodieSavepointPartitionMetadata instance
     * @param other The existing instance to copy.
     */
    private Builder(org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata other) {
      super(SCHEMA$, MODEL$);
      if (isValidValue(fields()[0], other.partitionPath)) {
        this.partitionPath = data().deepCopy(fields()[0].schema(), other.partitionPath);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.savepointDataFile)) {
        this.savepointDataFile = data().deepCopy(fields()[1].schema(), other.savepointDataFile);
        fieldSetFlags()[1] = true;
      }
    }

    /**
      * Gets the value of the 'partitionPath' field.
      * @return The value.
      */
    public java.lang.String getPartitionPath() {
      return partitionPath;
    }


    /**
      * Sets the value of the 'partitionPath' field.
      * @param value The value of 'partitionPath'.
      * @return This builder.
      */
    public org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder setPartitionPath(java.lang.String value) {
      validate(fields()[0], value);
      this.partitionPath = value;
      fieldSetFlags()[0] = true;
      return this;
    }

    /**
      * Checks whether the 'partitionPath' field has been set.
      * @return True if the 'partitionPath' field has been set, false otherwise.
      */
    public boolean hasPartitionPath() {
      return fieldSetFlags()[0];
    }


    /**
      * Clears the value of the 'partitionPath' field.
      * @return This builder.
      */
    public org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder clearPartitionPath() {
      partitionPath = null;
      fieldSetFlags()[0] = false;
      return this;
    }

    /**
      * Gets the value of the 'savepointDataFile' field.
      * @return The value.
      */
    public java.util.List<java.lang.String> getSavepointDataFile() {
      return savepointDataFile;
    }


    /**
      * Sets the value of the 'savepointDataFile' field.
      * @param value The value of 'savepointDataFile'.
      * @return This builder.
      */
    public org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder setSavepointDataFile(java.util.List<java.lang.String> value) {
      validate(fields()[1], value);
      this.savepointDataFile = value;
      fieldSetFlags()[1] = true;
      return this;
    }

    /**
      * Checks whether the 'savepointDataFile' field has been set.
      * @return True if the 'savepointDataFile' field has been set, false otherwise.
      */
    public boolean hasSavepointDataFile() {
      return fieldSetFlags()[1];
    }


    /**
      * Clears the value of the 'savepointDataFile' field.
      * @return This builder.
      */
    public org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata.Builder clearSavepointDataFile() {
      savepointDataFile = null;
      fieldSetFlags()[1] = false;
      return this;
    }

    @Override
    @SuppressWarnings("unchecked")
    public HoodieSavepointPartitionMetadata build() {
      try {
        HoodieSavepointPartitionMetadata record = new HoodieSavepointPartitionMetadata();
        record.partitionPath = fieldSetFlags()[0] ? this.partitionPath : (java.lang.String) defaultValue(fields()[0]);
        record.savepointDataFile = fieldSetFlags()[1] ? this.savepointDataFile : (java.util.List<java.lang.String>) defaultValue(fields()[1]);
        return record;
      } catch (org.apache.avro.AvroMissingFieldException e) {
        throw e;
      } catch (java.lang.Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }

  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumWriter<HoodieSavepointPartitionMetadata>
    WRITER$ = (org.apache.avro.io.DatumWriter<HoodieSavepointPartitionMetadata>)MODEL$.createDatumWriter(SCHEMA$);

  @Override public void writeExternal(java.io.ObjectOutput out)
    throws java.io.IOException {
    WRITER$.write(this, SpecificData.getEncoder(out));
  }

  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumReader<HoodieSavepointPartitionMetadata>
    READER$ = (org.apache.avro.io.DatumReader<HoodieSavepointPartitionMetadata>)MODEL$.createDatumReader(SCHEMA$);

  @Override public void readExternal(java.io.ObjectInput in)
    throws java.io.IOException {
    READER$.read(this, SpecificData.getDecoder(in));
  }

  @Override protected boolean hasCustomCoders() { return true; }

  @Override public void customEncode(org.apache.avro.io.Encoder out)
    throws java.io.IOException
  {
    out.writeString(this.partitionPath);

    long size0 = this.savepointDataFile.size();
    out.writeArrayStart();
    out.setItemCount(size0);
    long actualSize0 = 0;
    for (java.lang.String e0: this.savepointDataFile) {
      actualSize0++;
      out.startItem();
      out.writeString(e0);
    }
    out.writeArrayEnd();
    if (actualSize0 != size0)
      throw new java.util.ConcurrentModificationException("Array-size written was " + size0 + ", but element count was " + actualSize0 + ".");

  }

  @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
    throws java.io.IOException
  {
    org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
    if (fieldOrder == null) {
      this.partitionPath = in.readString();

      long size0 = in.readArrayStart();
      java.util.List<java.lang.String> a0 = this.savepointDataFile;
      if (a0 == null) {
        a0 = new SpecificData.Array<java.lang.String>((int)size0, SCHEMA$.getField("savepointDataFile").schema());
        this.savepointDataFile = a0;
      } else a0.clear();
      SpecificData.Array<java.lang.String> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.String>)a0 : null);
      for ( ; 0 < size0; size0 = in.arrayNext()) {
        for ( ; size0 != 0; size0--) {
          java.lang.String e0 = (ga0 != null ? ga0.peek() : null);
          e0 = in.readString();
          a0.add(e0);
        }
      }

    } else {
      for (int i = 0; i < 2; i++) {
        switch (fieldOrder[i].pos()) {
        case 0:
          this.partitionPath = in.readString();
          break;

        case 1:
          long size0 = in.readArrayStart();
          java.util.List<java.lang.String> a0 = this.savepointDataFile;
          if (a0 == null) {
            a0 = new SpecificData.Array<java.lang.String>((int)size0, SCHEMA$.getField("savepointDataFile").schema());
            this.savepointDataFile = a0;
          } else a0.clear();
          SpecificData.Array<java.lang.String> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<java.lang.String>)a0 : null);
          for ( ; 0 < size0; size0 = in.arrayNext()) {
            for ( ; size0 != 0; size0--) {
              java.lang.String e0 = (ga0 != null ? ga0.peek() : null);
              e0 = in.readString();
              a0.add(e0);
            }
          }
          break;

        default:
          throw new java.io.IOException("Corrupt ResolvingDecoder.");
        }
      }
    }
  }
}
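
The generated class above is typically consumed through its builder and single-object encoding helpers. Below is a minimal usage sketch, not part of the generated file: it builds a record with the generated Builder and round-trips it through toByteBuffer()/fromByteBuffer(). The example class name, partition path, and data file names are illustrative values only.

import java.nio.ByteBuffer;
import java.util.Arrays;

import org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata;

public class SavepointPartitionMetadataExample {
  public static void main(String[] args) throws Exception {
    // Build a record with the generated Builder (illustrative values).
    HoodieSavepointPartitionMetadata metadata =
        HoodieSavepointPartitionMetadata.newBuilder()
            .setPartitionPath("2021/01/01")
            .setSavepointDataFile(Arrays.asList(
                "file-1_0-0-0_20210101000000.parquet",
                "file-2_0-0-0_20210101000000.parquet"))
            .build();

    // Serialize to Avro's single-object binary encoding.
    ByteBuffer buffer = metadata.toByteBuffer();

    // Decode the bytes back into a record and read the fields.
    HoodieSavepointPartitionMetadata decoded =
        HoodieSavepointPartitionMetadata.fromByteBuffer(buffer);
    System.out.println(decoded.getPartitionPath());
    System.out.println(decoded.getSavepointDataFile());
  }
}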